From c8afc848403652f31f84eb362fc0696ee23cffca Mon Sep 17 00:00:00 2001 From: Tingluo Huang Date: Thu, 10 Oct 2019 00:52:42 -0400 Subject: [PATCH] GitHub Actions Runner --- .gitattributes | 87 + .github/ISSUE_TEMPLATE.md | 10 + .github/workflows/build.yml | 52 + .gitignore | 23 + LICENSE | 20 + README.md | 31 + assets.json | 20 + azure-pipelines-release.yml | 177 + azure-pipelines.yml | 95 + docs/contribute.md | 41 + docs/res/apple_med.png | Bin 0 -> 1394 bytes docs/res/apple_sm.png | Bin 0 -> 1263 bytes docs/res/github-graph.png | Bin 0 -> 162291 bytes docs/res/linux_med.png | Bin 0 -> 2131 bytes docs/res/linux_sm.png | Bin 0 -> 1695 bytes docs/res/redhat_med.png | Bin 0 -> 2255 bytes docs/res/redhat_sm.png | Bin 0 -> 1720 bytes docs/res/ubuntu_med.png | Bin 0 -> 1451 bytes docs/res/ubuntu_sm.png | Bin 0 -> 1366 bytes docs/res/win_med.png | Bin 0 -> 1494 bytes docs/res/win_sm.png | Bin 0 -> 1348 bytes docs/start/envlinux.md | 40 + docs/start/envosx.md | 10 + docs/start/envwin.md | 12 + images/arm/Dockerfile | 7 + images/centos6/Dockerfile | 150 + nonwindows.template.yml | 29 + releaseNote.md | 44 + src/Misc/dotnet-install.ps1 | 650 ++ src/Misc/dotnet-install.sh | 1025 ++ src/Misc/externals.sh | 148 + src/Misc/layoutbin/RunnerService.js | 91 + .../layoutbin/actions.runner.plist.template | 27 + .../layoutbin/actions.runner.service.template | 14 + src/Misc/layoutbin/darwin.svc.sh.template | 135 + src/Misc/layoutbin/installdependencies.sh | 298 + src/Misc/layoutbin/runsvc.sh | 20 + src/Misc/layoutbin/systemd.svc.sh.template | 143 + src/Misc/layoutbin/update.cmd.template | 143 + src/Misc/layoutbin/update.sh.template | 133 + src/Misc/layoutroot/config.cmd | 26 + src/Misc/layoutroot/config.sh | 86 + src/Misc/layoutroot/env.sh | 44 + src/Misc/layoutroot/run.cmd | 33 + src/Misc/layoutroot/run.sh | 51 + src/NuGet.Config | 10 + src/Runner.Common/ActionCommand.cs | 253 + src/Runner.Common/ActionResult.cs | 15 + src/Runner.Common/AsyncManualResetEvent.cs | 33 + .../Capabilities/CapabilitiesManager.cs | 73 + .../RunnerCapabilitiesProvider.cs | 86 + src/Runner.Common/CommandLineParser.cs | 128 + src/Runner.Common/ConfigurationStore.cs | 252 + src/Runner.Common/Constants.cs | 343 + src/Runner.Common/CredentialData.cs | 24 + src/Runner.Common/Exceptions.cs | 19 + src/Runner.Common/ExtensionManager.cs | 80 + src/Runner.Common/Extensions.cs | 30 + src/Runner.Common/HostContext.cs | 597 ++ src/Runner.Common/HostTraceListener.cs | 202 + src/Runner.Common/IExtension.cs | 9 + src/Runner.Common/JobNotification.cs | 296 + src/Runner.Common/JobServer.cs | 162 + src/Runner.Common/JobServerQueue.cs | 702 ++ src/Runner.Common/LocationServer.cs | 61 + src/Runner.Common/Logging.cs | 124 + src/Runner.Common/ProcessChannel.cs | 100 + src/Runner.Common/ProcessExtensions.cs | 396 + src/Runner.Common/ProcessInvoker.cs | 329 + src/Runner.Common/Runner.Common.csproj | 68 + src/Runner.Common/RunnerCertificateManager.cs | 231 + src/Runner.Common/RunnerCredentialStore.cs | 948 ++ src/Runner.Common/RunnerServer.cs | 355 + src/Runner.Common/RunnerService.cs | 39 + src/Runner.Common/RunnerWebProxy.cs | 196 + src/Runner.Common/StreamString.cs | 96 + src/Runner.Common/Terminal.cs | 198 + src/Runner.Common/ThrottlingReportHandler.cs | 65 + src/Runner.Common/TraceManager.cs | 88 + src/Runner.Common/TraceSetting.cs | 92 + src/Runner.Common/Tracing.cs | 128 + src/Runner.Common/Util/EnumUtil.cs | 18 + src/Runner.Common/Util/PlanUtil.cs | 28 + src/Runner.Common/Util/TaskResultUtil.cs | 79 + src/Runner.Common/Util/UnixUtil.cs | 79 + 
src/Runner.Common/Util/VarUtil.cs | 63 + src/Runner.Listener/Agent.cs | 493 + src/Runner.Listener/CommandSettings.cs | 467 + .../Configuration/ConfigurationManager.cs | 667 ++ .../Configuration/CredentialManager.cs | 91 + .../Configuration/CredentialProvider.cs | 231 + .../Configuration/IRSAKeyManager.cs | 108 + .../NativeWindowsServiceHelper.cs | 1319 +++ .../Configuration/OAuthCredential.cs | 49 + .../Configuration/OsxServiceControlManager.cs | 59 + .../Configuration/PromptManager.cs | 117 + .../RSAEncryptedFileKeyManager.cs | 87 + .../Configuration/RSAFileKeyManager.cs | 97 + .../Configuration/ServiceControlManager.cs | 63 + .../Configuration/SystemdControlManager.cs | 55 + .../Configuration/Validators.cs | 94 + .../WindowsServiceControlManager.cs | 172 + src/Runner.Listener/JobDispatcher.cs | 909 ++ src/Runner.Listener/MessageListener.cs | 407 + src/Runner.Listener/Program.cs | 140 + src/Runner.Listener/Runner.Listener.csproj | 70 + src/Runner.Listener/SelfUpdater.cs | 461 + src/Runner.PluginHost/Program.cs | 109 + .../Runner.PluginHost.csproj | 63 + src/Runner.Plugins/Artifact/BuildServer.cs | 58 + .../Artifact/DownloadArtifact.cs | 79 + .../Artifact/FileContainerServer.cs | 660 ++ .../Artifact/PublishArtifact.cs | 90 + .../Repository/GitCliManager.cs | 686 ++ .../Repository/v1.0/GitSourceProvider.cs | 703 ++ .../Repository/v1.0/RepositoryPlugin.cs | 175 + .../Repository/v1.1/GitSourceProvider.cs | 740 ++ .../Repository/v1.1/RepositoryPlugin.cs | 180 + src/Runner.Plugins/Runner.Plugins.csproj | 60 + src/Runner.Sdk/ActionPlugin.cs | 314 + src/Runner.Sdk/ITraceWriter.cs | 8 + src/Runner.Sdk/ProcessInvoker.cs | 892 ++ src/Runner.Sdk/Runner.Sdk.csproj | 65 + .../RunnerClientCertificateManager.cs | 40 + src/Runner.Sdk/RunnerWebProxyCore.cs | 104 + src/Runner.Sdk/Util/ArgUtil.cs | 78 + src/Runner.Sdk/Util/IOUtil.cs | 467 + src/Runner.Sdk/Util/PathUtil.cs | 36 + src/Runner.Sdk/Util/StringUtil.cs | 126 + src/Runner.Sdk/Util/UrlUtil.cs | 37 + src/Runner.Sdk/Util/VssUtil.cs | 99 + src/Runner.Sdk/Util/WhichUtil.cs | 120 + src/Runner.Service/Windows/App.config | 6 + src/Runner.Service/Windows/FinalPublicKey.snk | Bin 0 -> 160 bytes src/Runner.Service/Windows/Program.cs | 55 + .../Windows/Properties/AssemblyInfo.cs | 36 + .../Windows/Resource.Designer.cs | 144 + src/Runner.Service/Windows/Resource.resx | 147 + .../Windows/RunnerService.Designer.cs | 37 + src/Runner.Service/Windows/RunnerService.cs | 354 + .../Windows/RunnerService.csproj | 83 + src/Runner.Worker/ActionCommandManager.cs | 527 + src/Runner.Worker/ActionManager.cs | 847 ++ src/Runner.Worker/ActionManifestManager.cs | 980 ++ src/Runner.Worker/ActionRunner.cs | 324 + src/Runner.Worker/Container/ContainerInfo.cs | 318 + .../Container/DockerCommandManager.cs | 433 + src/Runner.Worker/Container/DockerUtil.cs | 49 + .../ContainerOperationProvider.cs | 414 + src/Runner.Worker/DiagnosticLogManager.cs | 209 + src/Runner.Worker/ExecutionContext.cs | 1025 ++ src/Runner.Worker/ExpressionManager.cs | 162 + src/Runner.Worker/GitHubContext.cs | 35 + .../Handlers/ContainerActionHandler.cs | 203 + src/Runner.Worker/Handlers/Handler.cs | 177 + src/Runner.Worker/Handlers/HandlerFactory.cs | 85 + .../Handlers/NodeScriptActionHandler.cs | 134 + src/Runner.Worker/Handlers/OutputManager.cs | 319 + .../Handlers/RunnerPluginHandler.cs | 58 + src/Runner.Worker/Handlers/ScriptHandler.cs | 241 + .../Handlers/ScriptHandlerHelpers.cs | 83 + src/Runner.Worker/Handlers/StepHost.cs | 236 + src/Runner.Worker/IEnvironmentContextData.cs | 7 + 
src/Runner.Worker/IssueMatcher.cs | 445 + src/Runner.Worker/JobContext.cs | 60 + src/Runner.Worker/JobExtension.cs | 399 + src/Runner.Worker/JobExtensionRunner.cs | 37 + src/Runner.Worker/JobRunner.cs | 292 + src/Runner.Worker/PipelineDirectoryManager.cs | 211 + src/Runner.Worker/Program.cs | 68 + src/Runner.Worker/Runner.Worker.csproj | 74 + src/Runner.Worker/RunnerContext.cs | 17 + src/Runner.Worker/RunnerPluginManager.cs | 149 + src/Runner.Worker/StepsContext.cs | 88 + src/Runner.Worker/StepsRunner.cs | 465 + src/Runner.Worker/TempDirectoryManager.cs | 62 + src/Runner.Worker/TrackingConfig.cs | 119 + src/Runner.Worker/TrackingManager.cs | 74 + src/Runner.Worker/Variables.cs | 221 + src/Runner.Worker/Worker.cs | 215 + src/Runner.Worker/WorkerUtilties.cs | 92 + src/Runner.Worker/action_yaml.json | 106 + src/Sdk/AadAuthentication/CookieUtility.cs | 264 + src/Sdk/AadAuthentication/VssAadCredential.cs | 95 + src/Sdk/AadAuthentication/VssAadSettings.cs | 89 + src/Sdk/AadAuthentication/VssAadToken.cs | 124 + .../AadAuthentication/VssAadTokenProvider.cs | 77 + .../VssFederatedCredential.cs | 172 + .../AadAuthentication/VssFederatedToken.cs | 84 + .../VssFederatedTokenProvider.cs | 157 + .../Api/AgentTargetExecutionType.cs | 12 + .../BuildWebApi/Api/ArtifactResourceTypes.cs | 76 + .../Api/BuildDefinitionExtensions.cs | 17 + .../BuildWebApi/Api/BuildDefinitionHelpers.cs | 44 + src/Sdk/BuildWebApi/Api/BuildHttpClient.cs | 1489 +++ .../Api/BuildHttpClientCompatBase.cs | 2463 +++++ src/Sdk/BuildWebApi/Api/BuildIssueKeys.cs | 21 + .../BuildWebApi/Api/BuildOrchestrationType.cs | 12 + src/Sdk/BuildWebApi/Api/BuildPermissions.cs | 40 + src/Sdk/BuildWebApi/Api/BuildResourceIds.cs | 165 + .../Api/BuildTemplateCategories.cs | 29 + src/Sdk/BuildWebApi/Api/BuildVariables.cs | 87 + .../Api/Contracts/AgentPoolQueue.cs | 134 + .../Api/Contracts/AgentPoolQueueTarget.cs | 102 + .../Api/Contracts/AgentSpecification.cs | 28 + .../Contracts/AgentTargetExecutionOptions.cs | 51 + ...gentTargetExecutionOptionsJsonConverter.cs | 42 + .../Api/Contracts/ArtifactResource.cs | 95 + .../BuildWebApi/Api/Contracts/Attachment.cs | 51 + src/Sdk/BuildWebApi/Api/Contracts/Build.cs | 605 ++ .../Api/Contracts/BuildArtifact.cs | 63 + .../BuildWebApi/Api/Contracts/BuildBadge.cs | 36 + .../Api/Contracts/BuildDefinition.cs | 365 + .../Api/Contracts/BuildDefinitionReference.cs | 162 + .../Api/Contracts/BuildDefinitionRevision.cs | 86 + .../Api/Contracts/BuildDefinitionStep.cs | 250 + .../Api/Contracts/BuildDefinitionTemplate.cs | 122 + .../Api/Contracts/BuildDefinitionVariable.cs | 69 + src/Sdk/BuildWebApi/Api/Contracts/BuildLog.cs | 53 + .../Api/Contracts/BuildLogReference.cs | 54 + .../BuildWebApi/Api/Contracts/BuildMetric.cs | 63 + .../BuildWebApi/Api/Contracts/BuildOption.cs | 67 + .../Api/Contracts/BuildOptionDefinition.cs | 100 + .../BuildOptionDefinitionReference.cs | 33 + .../Contracts/BuildOptionGroupDefinition.cs | 53 + .../Contracts/BuildOptionInputDefinition.cs | 144 + .../BuildWebApi/Api/Contracts/BuildProcess.cs | 45 + .../Contracts/BuildProcessJsonConverter.cs | 68 + .../Api/Contracts/BuildProcessResources.cs | 247 + .../Api/Contracts/BuildReference.cs | 138 + .../Api/Contracts/BuildReportMetadata.cs | 55 + .../Api/Contracts/BuildRepository.cs | 185 + .../Contracts/BuildRequestValidationResult.cs | 43 + .../Api/Contracts/BuildResourceUsage.cs | 64 + .../Api/Contracts/BuildSettings.cs | 54 + .../BuildWebApi/Api/Contracts/BuildTrigger.cs | 446 + .../Contracts/BuildTriggerJsonConverter.cs | 115 + 
.../Api/Contracts/BuildWorkspace.cs | 68 + src/Sdk/BuildWebApi/Api/Contracts/Change.cs | 114 + .../Api/Contracts/DefinitionReference.cs | 195 + .../Contracts/DefinitionResourceReference.cs | 61 + src/Sdk/BuildWebApi/Api/Contracts/Demand.cs | 117 + .../BuildWebApi/Api/Contracts/DemandEquals.cs | 46 + .../BuildWebApi/Api/Contracts/DemandExists.cs | 41 + .../Api/Contracts/DemandJsonConverter.cs | 46 + .../BuildWebApi/Api/Contracts/Dependency.cs | 43 + .../Deprecated/BuildDefinition3_2.cs | 441 + .../Deprecated/BuildDefinitionReference3_2.cs | 127 + .../Deprecated/BuildDefinitionTemplate3_2.cs | 123 + .../Api/Contracts/DesignerProcess.cs | 48 + .../Api/Contracts/DesignerProcessTarget.cs | 27 + .../Api/Contracts/DockerProcess.cs | 23 + .../Api/Contracts/DockerProcessTarget.cs | 21 + src/Sdk/BuildWebApi/Api/Contracts/Folder.cs | 119 + src/Sdk/BuildWebApi/Api/Contracts/Forks.cs | 43 + src/Sdk/BuildWebApi/Api/Contracts/Issue.cs | 98 + .../Api/Contracts/JustInTimeProcess.cs | 20 + .../MultipleAgentExecutionOptions.cs | 46 + src/Sdk/BuildWebApi/Api/Contracts/Phase.cs | 186 + .../BuildWebApi/Api/Contracts/PhaseTarget.cs | 46 + .../Api/Contracts/PhaseTargetJsonConverter.cs | 36 + .../Api/Contracts/PropertyValue.cs | 37 + .../BuildWebApi/Api/Contracts/PullRequest.cs | 91 + .../Api/Contracts/RepositoryWebhook.cs | 68 + .../Api/Contracts/RequestShallowReference.cs | 30 + .../Api/Contracts/RetentionPolicy.cs | 171 + src/Sdk/BuildWebApi/Api/Contracts/Schedule.cs | 89 + .../BuildWebApi/Api/Contracts/ServerTarget.cs | 33 + .../Contracts/ServerTargetExecutionOptions.cs | 53 + ...rverTargetExecutionOptionsJsonConverter.cs | 36 + .../Api/Contracts/ShallowReference.cs | 35 + .../Api/Contracts/SourceProviderAttributes.cs | 181 + .../Api/Contracts/SourceRelatedWorkItem.cs | 84 + .../Api/Contracts/SourceRepository.cs | 171 + .../Api/Contracts/SourceRepositoryItem.cs | 36 + .../BuildWebApi/Api/Contracts/SvnWorkspace.cs | 88 + .../Api/Contracts/TaskAgentPoolReference.cs | 66 + .../TaskOrchestrationPlanGroupReference.cs | 30 + .../TaskOrchestrationPlanReference.cs | 50 + .../Api/Contracts/TaskReference.cs | 71 + src/Sdk/BuildWebApi/Api/Contracts/Timeline.cs | 89 + .../Api/Contracts/TimelineAttempt.cs | 54 + .../Api/Contracts/TimelineRecord.cs | 309 + .../Api/Contracts/TimelineReference.cs | 56 + .../Api/Contracts/VariableGroup.cs | 98 + .../Api/Contracts/VariableGroupReference.cs | 44 + ...ariableMultipliersAgentExecutionOptions.cs | 84 + ...riableMultipliersServerExecutionOptions.cs | 84 + .../Api/Contracts/WorkspaceTemplate.cs | 141 + .../BuildWebApi/Api/Contracts/YamlProcess.cs | 76 + src/Sdk/BuildWebApi/Api/DefinitionMetrics.cs | 35 + .../Api/DefinitionReferenceJsonConverter.cs | 112 + src/Sdk/BuildWebApi/Api/EndpointData.cs | 51 + src/Sdk/BuildWebApi/Api/Enumerations.cs | 855 ++ .../Api/Events/BuildArtifactAddedEvent.cs | 25 + .../Api/Events/BuildChangesCalculatedEvent.cs | 26 + .../Api/Events/BuildDefinitionChangedEvent.cs | 34 + .../Events/BuildDefinitionChangingEvent.cs | 42 + .../Api/Events/BuildDeletedEvent.cs | 21 + .../Api/Events/BuildDestroyedEvent.cs | 23 + src/Sdk/BuildWebApi/Api/Events/BuildEvent.cs | 62 + src/Sdk/BuildWebApi/Api/Events/BuildEvents.cs | 17 + .../Api/Events/BuildPollingSummaryEvent.cs | 25 + .../Api/Events/BuildQueuedEvent.cs | 15 + .../Api/Events/BuildStartedEvent.cs | 15 + .../Api/Events/BuildTagsAddedEvent.cs | 34 + .../Api/Events/BuildUpdatedEvent.cs | 21 + .../Api/Events/BuildsDeletedEvent.cs | 62 + .../BuildWebApi/Api/Events/ConsoleLogEvent.cs | 69 + 
.../Api/Events/RealTimeBuildEvent.cs | 21 + .../Api/Events/SyncBuildCompletedEvent.cs | 19 + .../Api/Events/SyncBuildStartedEvent.cs | 19 + ...TaskOrchestrationPlanGroupsStartedEvent.cs | 17 + .../Api/Events/TimelineRecordsUpdatedEvent.cs | 25 + src/Sdk/BuildWebApi/Api/Exceptions.cs | 1632 +++ .../Api/Generated/BuildHttpClientBase.cs | 5744 +++++++++++ .../IVariableMultiplierExecutionOptions.cs | 24 + src/Sdk/BuildWebApi/Api/Links.cs | 18 + .../BuildWebApi/Api/MetricAggregationTypes.cs | 19 + src/Sdk/BuildWebApi/Api/PhaseTargetType.cs | 12 + src/Sdk/BuildWebApi/Api/ProcessType.cs | 29 + src/Sdk/BuildWebApi/Api/ReportTypes.cs | 15 + .../BuildWebApi/Api/RepositoryProperties.cs | 72 + src/Sdk/BuildWebApi/Api/RepositoryTypes.cs | 29 + src/Sdk/BuildWebApi/Api/Security.cs | 55 + .../BuildWebApi/Api/SerializationHelper.cs | 39 + .../Api/ServerTargetExecutionType.cs | 11 + src/Sdk/BuildWebApi/Api/SettingsSourceType.cs | 14 + .../Api/TypePropertyJsonConverter.cs | 114 + .../BuildWebApi/Api/WellKnownBuildOptions.cs | 11 + .../Api/WellKnownDataProviderKeys.cs | 44 + .../Authentication/FederatedCredential.cs | 35 + .../HttpRequestMessageWrapper.cs | 66 + .../HttpResponseMessageWrapper.cs | 58 + .../Common/Authentication/IHttpHeaders.cs | 14 + .../Common/Authentication/IHttpRequest.cs | 23 + .../Common/Authentication/IHttpResponse.cs | 17 + .../Authentication/IVssCredentialPrompt.cs | 29 + .../Authentication/IVssCredentialStorage.cs | 23 + .../Common/Authentication/IssuedToken.cs | 113 + .../Authentication/IssuedTokenCredential.cs | 148 + .../Authentication/IssuedTokenProvider.cs | 545 + .../Authentication/VssBasicCredential.cs | 92 + .../Common/Authentication/VssBasicToken.cs | 63 + .../Authentication/VssBasicTokenProvider.cs | 39 + .../Common/Authentication/VssCredentials.cs | 611 ++ .../VssServiceIdentityCredential.cs | 164 + .../Authentication/VssServiceIdentityToken.cs | 114 + .../VssServiceIdentityTokenProvider.cs | 201 + .../Authentication/WindowsCredential.cs | 137 + .../Common/Authentication/WindowsToken.cs | 39 + .../Authentication/WindowsTokenProvider.cs | 40 + .../Common/ClientStorage/IVssClientStorage.cs | 80 + .../Common/ClientStorage/VssFileStorage.cs | 623 ++ .../HttpRequestMessageExtensions.cs | 31 + .../Common/Diagnostics/VssHttpEventSource.cs | 1173 +++ .../Common/Diagnostics/VssHttpMethod.cs | 15 + .../Common/Diagnostics/VssTraceActivity.cs | 150 + .../Common/ExceptionMappingAttribute.cs | 54 + .../Exceptions/AuthenticationExceptions.cs | 58 + .../Common/Exceptions/CommonExceptions.cs | 70 + .../Common/Exceptions/PropertyExceptions.cs | 63 + .../Common/GenerateConstantAttributes.cs | 98 + .../Common/IVssClientCertificateManager.cs | 13 + src/Sdk/Common/Common/IVssHttpRetryInfo.cs | 19 + .../Performance/PerformanceTimerConstants.cs | 26 + .../Performance/PerformanceTimingGroup.cs | 61 + .../Common/TaskCancellationExtensions.cs | 107 + .../Common/Common/Utility/ArgumentUtility.cs | 1248 +++ src/Sdk/Common/Common/Utility/ArrayUtility.cs | 148 + .../Common/Utility/BackoffTimerHelper.cs | 38 + .../Common/Utility/CollectionsExtensions.cs | 38 + .../Common/Common/Utility/ConvertUtility.cs | 29 + .../Common/Utility/DictionaryExtensions.cs | 655 ++ .../Common/Utility/EnumerableExtensions.cs | 422 + .../Utility/ExpectedExceptionExtensions.cs | 62 + src/Sdk/Common/Common/Utility/HttpHeaders.cs | 91 + .../Common/Common/Utility/LongPathUtility.cs | 553 ++ .../Common/Utility/PartitioningResults.cs | 26 + src/Sdk/Common/Common/Utility/PathUtility.cs | 40 + .../Common/Utility/PrimitiveExtensions.cs 
| 89 + .../Common/Utility/PropertyValidation.cs | 361 + .../Common/Common/Utility/SecretUtility.cs | 253 + .../Common/Common/Utility/SecureCompare.cs | 49 + src/Sdk/Common/Common/Utility/StreamParser.cs | 188 + .../Common/Utility/TypeExtensionMethods.cs | 294 + .../Common/Common/Utility/UriExtensions.cs | 160 + src/Sdk/Common/Common/Utility/UriUtility.cs | 2204 +++++ .../Common/Utility/VssStringComparer.cs | 286 + src/Sdk/Common/Common/Utility/XmlUtility.cs | 1489 +++ src/Sdk/Common/Common/VssCommonConstants.cs | 1054 ++ src/Sdk/Common/Common/VssException.cs | 291 + .../Common/Common/VssHttpMessageHandler.cs | 682 ++ .../Common/VssHttpMessageHandlerTraceInfo.cs | 109 + .../Common/Common/VssHttpRequestSettings.cs | 416 + .../Common/VssHttpRetryMessageHandler.cs | 232 + src/Sdk/Common/Common/VssHttpRetryOptions.cs | 200 + src/Sdk/Common/Common/VssNetworkHelper.cs | 236 + .../Common/VssPerformanceEventSource.cs | 498 + src/Sdk/Common/EmbeddedVersionInfo.cs | 39 + src/Sdk/CoreWebApi/Core/ProjectClasses.cs | 80 + .../CoreWebApi/Core/TeamProjectReference.cs | 105 + .../Contracts/AuthorizationHeader.cs | 16 + .../Contracts/DataSourceBinding.cs | 149 + .../Contracts/ProcessParameters.cs | 163 + .../Contracts/TaskInputDefinition.cs | 254 + .../DTContracts/Contracts/TaskInputType.cs | 14 + .../Contracts/TaskInputValidation.cs | 59 + .../Contracts/TaskSourceDefinition.cs | 74 + src/Sdk/DTExpressions/Expressions/AndNode.cs | 22 + .../DTExpressions/Expressions/CoalesceNode.cs | 31 + .../CollectionAccessors/JArrayAccessor.cs | 31 + .../CollectionAccessors/JObjectAccessor.cs | 56 + .../JsonDictionaryContractAccessor.cs | 106 + .../JsonObjectContractAccessor.cs | 89 + .../ListOfObjectAccessor.cs | 30 + ...eadOnlyDictionaryOfStringObjectAccessor.cs | 37 + ...eadOnlyDictionaryOfStringStringAccessor.cs | 54 + .../ReadOnlyListOfObjectAccessor.cs | 24 + .../Expressions/ContainerNode.cs | 49 + .../DTExpressions/Expressions/ContainsNode.cs | 16 + .../Expressions/ContainsValueNode.cs | 46 + .../Expressions/ConversionResult.cs | 19 + .../DTExpressions/Expressions/EndsWithNode.cs | 16 + .../DTExpressions/Expressions/EqualNode.cs | 14 + .../Expressions/EvaluationContext.cs | 78 + .../Expressions/EvaluationMemory.cs | 111 + .../Expressions/EvaluationOptions.cs | 43 + .../Expressions/EvaluationResult.cs | 828 ++ .../Expressions/EvaluationTraceWriter.cs | 37 + .../Expressions/ExpressionConstants.cs | 52 + .../Expressions/ExpressionException.cs | 22 + .../Expressions/ExpressionNode.cs | 494 + .../Expressions/ExpressionParser.cs | 547 + .../Expressions/ExpressionParserOptions.cs | 13 + .../Expressions/ExpressionUtil.cs | 211 + .../DTExpressions/Expressions/FormatNode.cs | 394 + .../DTExpressions/Expressions/FunctionInfo.cs | 26 + .../DTExpressions/Expressions/FunctionNode.cs | 45 + .../Expressions/GreaterThanNode.cs | 14 + .../Expressions/GreaterThanOrEqualNode.cs | 14 + src/Sdk/DTExpressions/Expressions/IBoolean.cs | 12 + .../Expressions/IExpressionNode.cs | 70 + .../Expressions/IFunctionInfo.cs | 12 + .../Expressions/INamedValueInfo.cs | 10 + src/Sdk/DTExpressions/Expressions/INumber.cs | 12 + .../Expressions/IReadOnlyArray.cs | 11 + .../Expressions/IReadOnlyObject.cs | 11 + src/Sdk/DTExpressions/Expressions/IString.cs | 12 + .../DTExpressions/Expressions/ITraceWriter.cs | 10 + src/Sdk/DTExpressions/Expressions/InNode.cs | 24 + .../DTExpressions/Expressions/IndexerNode.cs | 452 + src/Sdk/DTExpressions/Expressions/JoinNode.cs | 69 + .../DTExpressions/Expressions/LessThanNode.cs | 14 + 
.../Expressions/LessThanOrEqualNode.cs | 14 + .../Expressions/LexicalAnalyzer.cs | 293 + .../Expressions/LiteralValueNode.cs | 47 + .../Expressions/MemoryCounter.cs | 166 + .../Expressions/NamedValueInfo.cs | 20 + .../Expressions/NamedValueNode.cs | 24 + .../DTExpressions/Expressions/NotEqualNode.cs | 14 + .../DTExpressions/Expressions/NotInNode.cs | 24 + src/Sdk/DTExpressions/Expressions/NotNode.cs | 14 + src/Sdk/DTExpressions/Expressions/OrNode.cs | 22 + .../Expressions/ParseException.cs | 65 + .../Expressions/ParseExceptionKind.cs | 14 + .../DTExpressions/Expressions/ResultMemory.cs | 58 + .../Expressions/StartsWithNode.cs | 16 + src/Sdk/DTExpressions/Expressions/Token.cs | 28 + .../DTExpressions/Expressions/TokenKind.cs | 29 + .../Expressions/TypeCastException.cs | 67 + .../Expressions/UnknownFunctionNode.cs | 13 + .../Expressions/UnknownNamedValueNode.cs | 13 + .../DTExpressions/Expressions/ValueKind.cs | 17 + src/Sdk/DTExpressions/Expressions/XOrNode.cs | 14 + .../Expressions2/EvaluationOptions.cs | 23 + .../Expressions2/EvaluationResult.cs | 453 + .../Expressions2/ExpressionConstants.cs | 60 + .../Expressions2/ExpressionException.cs | 24 + .../Expressions2/ExpressionParser.cs | 471 + .../Expressions2/FunctionInfo.cs | 29 + .../Expressions2/IExpressionNode.cs | 25 + .../Expressions2/IExpressionNodeExtensions.cs | 237 + .../Expressions2/IFunctionInfo.cs | 15 + .../Expressions2/INamedValueInfo.cs | 13 + .../Expressions2/ITraceWriter.cs | 12 + .../Expressions2/NamedValueInfo.cs | 23 + .../Expressions2/ParseException.cs | 68 + .../Expressions2/ParseExceptionKind.cs | 14 + .../Expressions2/Sdk/Container.cs | 20 + .../Expressions2/Sdk/EvaluationContext.cs | 78 + .../Expressions2/Sdk/EvaluationMemory.cs | 112 + .../Expressions2/Sdk/EvaluationTraceWriter.cs | 37 + .../Expressions2/Sdk/ExpressionNode.cs | 192 + .../Expressions2/Sdk/ExpressionUtility.cs | 265 + .../Expressions2/Sdk/Function.cs | 45 + .../Expressions2/Sdk/Functions/Contains.cs | 44 + .../Expressions2/Sdk/Functions/EndsWith.cs | 30 + .../Expressions2/Sdk/Functions/Format.cs | 298 + .../Expressions2/Sdk/Functions/HashFiles.cs | 93 + .../Expressions2/Sdk/Functions/Join.cs | 74 + .../Expressions2/Sdk/Functions/NoOperation.cs | 22 + .../Expressions2/Sdk/Functions/StartsWith.cs | 30 + .../Expressions2/Sdk/Functions/ToJson.cs | 390 + .../Expressions2/Sdk/IBoolean.cs | 11 + .../DTExpressions2/Expressions2/Sdk/INull.cs | 9 + .../Expressions2/Sdk/INumber.cs | 11 + .../Expressions2/Sdk/IReadOnlyArray.cs | 16 + .../Expressions2/Sdk/IReadOnlyObject.cs | 27 + .../Expressions2/Sdk/IString.cs | 11 + .../Expressions2/Sdk/Literal.cs | 43 + .../Expressions2/Sdk/MemoryCounter.cs | 94 + .../Expressions2/Sdk/NamedValue.cs | 24 + .../Expressions2/Sdk/NoOperationNamedValue.cs | 19 + .../Expressions2/Sdk/Operators/And.cs | 51 + .../Expressions2/Sdk/Operators/Equal.cs | 44 + .../Expressions2/Sdk/Operators/GreaterThan.cs | 44 + .../Sdk/Operators/GreaterThanOrEqual.cs | 44 + .../Expressions2/Sdk/Operators/Index.cs | 286 + .../Expressions2/Sdk/Operators/LessThan.cs | 44 + .../Sdk/Operators/LessThanOrEqual.cs | 44 + .../Expressions2/Sdk/Operators/Not.cs | 41 + .../Expressions2/Sdk/Operators/NotEqual.cs | 44 + .../Expressions2/Sdk/Operators/Or.cs | 51 + .../Expressions2/Sdk/ResultMemory.cs | 58 + .../Expressions2/Sdk/Wildcard.cs | 32 + .../Expressions2/Tokens/Associativity.cs | 9 + .../Expressions2/Tokens/LexicalAnalyzer.cs | 491 + .../Expressions2/Tokens/Token.cs | 209 + .../Expressions2/Tokens/TokenKind.cs | 28 + 
.../DTExpressions2/Expressions2/ValueKind.cs | 15 + .../Generated/TaskAgentHttpClientBase.cs | 8786 +++++++++++++++++ .../Generated/TaskHttpClientBase.cs | 800 ++ src/Sdk/DTLogging/Logging/ISecret.cs | 13 + src/Sdk/DTLogging/Logging/ISecretMasker.cs | 15 + src/Sdk/DTLogging/Logging/RegexSecret.cs | 50 + .../DTLogging/Logging/ReplacementPosition.cs | 29 + src/Sdk/DTLogging/Logging/SecretMasker.cs | 298 + src/Sdk/DTLogging/Logging/ValueEncoders.cs | 116 + src/Sdk/DTLogging/Logging/ValueSecret.cs | 48 + .../ObjectTemplating/ContextValueNode.cs | 19 + .../ObjectTemplating/EmptyTraceWriter.cs | 27 + .../ObjectTemplating/ExpressionTraceWriter.cs | 27 + .../ObjectTemplating/IObjectReader.cs | 26 + .../ObjectTemplating/IObjectWriter.cs | 31 + .../ObjectTemplating/ITraceWriter.cs | 21 + .../ITraceWriterExtensions.cs | 10 + .../Schema/BooleanDefinition.cs | 54 + .../ObjectTemplating/Schema/Definition.cs | 49 + .../ObjectTemplating/Schema/DefinitionType.cs | 13 + .../Schema/MappingDefinition.cs | 136 + .../ObjectTemplating/Schema/NullDefinition.cs | 54 + .../Schema/NumberDefinition.cs | 54 + .../Schema/OneOfDefinition.cs | 209 + .../ObjectTemplating/Schema/PropertyValue.cs | 18 + .../Schema/ScalarDefinition.cs | 19 + .../Schema/SequenceDefinition.cs | 64 + .../Schema/StringDefinition.cs | 104 + .../ObjectTemplating/Schema/TemplateSchema.cs | 480 + .../ObjectTemplating/TemplateConstants.cs | 56 + .../ObjectTemplating/TemplateContext.cs | 231 + .../ObjectTemplating/TemplateEvaluator.cs | 433 + .../ObjectTemplating/TemplateException.cs | 90 + .../ObjectTemplating/TemplateMemory.cs | 302 + .../ObjectTemplating/TemplateReader.cs | 818 ++ .../ObjectTemplating/TemplateUnraveler.cs | 1210 +++ .../TemplateValidationError.cs | 62 + .../TemplateValidationErrors.cs | 110 + .../ObjectTemplating/TemplateWriter.cs | 72 + .../Tokens/BasicExpressionToken.cs | 146 + .../ObjectTemplating/Tokens/BooleanToken.cs | 44 + .../Tokens/ExpressionToken.cs | 64 + .../Tokens/InsertExpressionToken.cs | 31 + .../ObjectTemplating/Tokens/LiteralToken.cs | 22 + .../ObjectTemplating/Tokens/MappingToken.cs | 245 + .../ObjectTemplating/Tokens/NullToken.cs | 32 + .../ObjectTemplating/Tokens/NumberToken.cs | 45 + .../ObjectTemplating/Tokens/ScalarToken.cs | 36 + .../ObjectTemplating/Tokens/SequenceToken.cs | 151 + .../ObjectTemplating/Tokens/StringToken.cs | 64 + .../ObjectTemplating/Tokens/TemplateToken.cs | 290 + .../Tokens/TemplateTokenExtensions.cs | 221 + .../Tokens/TemplateTokenJsonConverter.cs | 332 + .../ObjectTemplating/Tokens/TokenType.cs | 23 + src/Sdk/DTPipelines/Pipelines/ActionStep.cs | 61 + .../ActionStepDefinitionReference.cs | 153 + .../ActionStepDefinitionReferenceConverter.cs | 82 + .../Pipelines/AgentJobRequestMessage.cs | 397 + .../Pipelines/AgentJobRequestMessageUtil.cs | 769 ++ .../Pipelines/AgentPoolReference.cs | 38 + .../DTPipelines/Pipelines/AgentPoolStore.cs | 116 + .../DTPipelines/Pipelines/AgentPoolTarget.cs | 169 + .../Pipelines/AgentQueueReference.cs | 38 + .../DTPipelines/Pipelines/AgentQueueStore.cs | 161 + .../DTPipelines/Pipelines/AgentQueueTarget.cs | 647 ++ .../Pipelines/Artifacts/ArtifactConstants.cs | 15 + .../Artifacts/DownloadStepExtensions.cs | 150 + .../Pipelines/Artifacts/IArtifactResolver.cs | 49 + .../Artifacts/PipelineArtifactConstants.cs | 113 + .../Artifacts/YamlArtifactConstants.cs | 16 + src/Sdk/DTPipelines/Pipelines/BuildOptions.cs | 119 + .../DTPipelines/Pipelines/BuildResource.cs | 68 + .../Checkpoints/CheckpointContext.cs | 63 + .../Checkpoints/CheckpointDecision.cs | 36 + 
.../Pipelines/Checkpoints/CheckpointScope.cs | 56 + .../Pipelines/Checkpoints/ResourceInfo.cs | 22 + .../DTPipelines/Pipelines/ConditionResult.cs | 25 + .../Pipelines/ContainerResource.cs | 113 + .../Pipelines/ContextData/ArrayContextData.cs | 113 + .../ContextData/BooleanContextData.cs | 62 + .../CaseSensitiveDictionaryContextData.cs | 293 + .../ContextData/DictionaryContextData.cs | 293 + .../Pipelines/ContextData/JTokenExtensions.cs | 64 + .../ContextData/NumberContextData.cs | 77 + .../ContextData/PipelineContextData.cs | 31 + .../PipelineContextDataExtensions.cs | 290 + .../PipelineContextDataJsonConverter.cs | 200 + .../ContextData/PipelineContextDataType.cs | 19 + .../ContextData/StringContextData.cs | 76 + .../ContextData/TemplateMemoryExtensions.cs | 65 + .../ContextData/TemplateTokenExtensions.cs | 78 + src/Sdk/DTPipelines/Pipelines/ContextScope.cs | 53 + .../Pipelines/ContinuousIntegrationTrigger.cs | 89 + src/Sdk/DTPipelines/Pipelines/CounterStore.cs | 61 + .../DTPipelines/Pipelines/CreateJobResult.cs | 50 + .../Pipelines/DeploymentExecutionOptions.cs | 74 + .../Pipelines/DeploymentGroupTarget.cs | 158 + .../Environment/EnvironmentReference.cs | 38 + .../Pipelines/EnvironmentDeploymentTarget.cs | 21 + .../DTPipelines/Pipelines/EnvironmentStore.cs | 97 + .../DTPipelines/Pipelines/ExecutionOptions.cs | 107 + .../Pipelines/ExpandPhaseResult.cs | 66 + .../DTPipelines/Pipelines/ExpressionResult.cs | 58 + .../DTPipelines/Pipelines/ExpressionValue.cs | 311 + .../Pipelines/Expressions/CounterNode.cs | 28 + .../Expressions/ExpressionConstants.cs | 30 + .../Expressions/InputValidationConstants.cs | 32 + .../Pipelines/Expressions/InputValueNode.cs | 15 + .../Pipelines/Expressions/IsEmailNode.cs | 22 + .../Expressions/IsIPv4AddressNode.cs | 22 + .../Pipelines/Expressions/IsInRangeNode.cs | 24 + .../Pipelines/Expressions/IsMatchNode.cs | 30 + .../Pipelines/Expressions/IsSHA1Node.cs | 22 + .../Pipelines/Expressions/IsUrlNode.cs | 22 + .../Pipelines/Expressions/LengthNode.cs | 63 + .../Expressions/PipelineContextNode.cs | 32 + .../Pipelines/Expressions/RegexUtility.cs | 153 + .../Expressions/VariablesContextNode.cs | 16 + .../WellKnownRegularExpressions.cs | 63 + .../DTPipelines/Pipelines/GraphCondition.cs | 247 + src/Sdk/DTPipelines/Pipelines/GroupStep.cs | 86 + .../Pipelines/IAgentPoolResolver.cs | 37 + .../DTPipelines/Pipelines/IAgentPoolStore.cs | 27 + .../Pipelines/IAgentQueueResolver.cs | 37 + .../DTPipelines/Pipelines/IAgentQueueStore.cs | 27 + .../DTPipelines/Pipelines/ICounterResolver.cs | 11 + .../DTPipelines/Pipelines/ICounterStore.cs | 24 + .../Pipelines/IEnvironmentResolver.cs | 14 + .../Pipelines/IEnvironmentStore.cs | 22 + src/Sdk/DTPipelines/Pipelines/IGraphNode.cs | 51 + src/Sdk/DTPipelines/Pipelines/IJobFactory.cs | 20 + .../DTPipelines/Pipelines/IPackageStore.cs | 12 + .../DTPipelines/Pipelines/IPhaseProvider.cs | 20 + .../DTPipelines/Pipelines/IPipelineContext.cs | 59 + .../Pipelines/IPipelineContextExtensions.cs | 81 + .../Pipelines/IPipelineIdGenerator.cs | 35 + .../DTPipelines/Pipelines/IResourceStore.cs | 87 + .../Pipelines/IResourceStoreExtensions.cs | 198 + .../Pipelines/ISecureFileResolver.cs | 37 + .../DTPipelines/Pipelines/ISecureFileStore.cs | 19 + .../Pipelines/IServiceEndpointResolver.cs | 46 + .../Pipelines/IServiceEndpointStore.cs | 39 + .../DTPipelines/Pipelines/IStepProvider.cs | 21 + .../DTPipelines/Pipelines/ITaskResolver.cs | 12 + src/Sdk/DTPipelines/Pipelines/ITaskStore.cs | 30 + .../Pipelines/ITaskTemplateResolver.cs | 14 + 
.../Pipelines/ITaskTemplateStore.cs | 16 + src/Sdk/DTPipelines/Pipelines/IVariable.cs | 88 + .../Pipelines/IVariableGroupResolver.cs | 32 + .../Pipelines/IVariableGroupStore.cs | 21 + .../Pipelines/IVariableValueProvider.cs | 22 + src/Sdk/DTPipelines/Pipelines/Job.cs | 291 + src/Sdk/DTPipelines/Pipelines/JobContainer.cs | 60 + .../Pipelines/JobExpansionOptions.cs | 106 + src/Sdk/DTPipelines/Pipelines/JobFactory.cs | 480 + src/Sdk/DTPipelines/Pipelines/JobResources.cs | 108 + src/Sdk/DTPipelines/Pipelines/JobStep.cs | 48 + .../GraphConditionNamedValue.cs | 24 + .../ObjectTemplating/IFileProvider.cs | 13 + .../ObjectTemplating/JobDisplayNameBuilder.cs | 59 + .../ObjectTemplating/JsonObjectReader.cs | 234 + .../ObjectTemplating/MatrixBuilder.cs | 445 + .../ObjectTemplating/ParseOptions.cs | 45 + .../Pipelines/ObjectTemplating/ParseResult.cs | 30 + .../PipelineTemplateConstants.cs | 82 + .../PipelineTemplateConverter.cs | 1147 +++ .../PipelineTemplateEvaluator.cs | 526 + .../PipelineTemplateParser.cs | 239 + .../PipelineTemplateSchemaFactory.cs | 26 + .../ObjectTemplating/ReferenceNameBuilder.cs | 121 + .../ObjectTemplating/TaskResultExtensions.cs | 37 + .../ObjectTemplating/TemplateReference.cs | 197 + .../ObjectTemplating/YamlObjectReader.cs | 572 ++ .../ObjectTemplating/YamlObjectWriter.cs | 73 + .../ObjectTemplating/YamlTemplateLoader.cs | 251 + src/Sdk/DTPipelines/Pipelines/PackageStore.cs | 51 + .../Pipelines/ParallelExecutionOptions.cs | 315 + src/Sdk/DTPipelines/Pipelines/Phase.cs | 1677 ++++ .../DTPipelines/Pipelines/PhaseCondition.cs | 22 + .../DTPipelines/Pipelines/PhaseDependency.cs | 56 + src/Sdk/DTPipelines/Pipelines/PhaseNode.cs | 421 + src/Sdk/DTPipelines/Pipelines/PhaseTarget.cs | 260 + .../DTPipelines/Pipelines/PhaseTargetType.cs | 24 + .../Pipelines/PipelineBuildContext.cs | 182 + .../Pipelines/PipelineBuildResult.cs | 73 + .../DTPipelines/Pipelines/PipelineBuilder.cs | 411 + .../Pipelines/PipelineConstants.cs | 221 + .../Pipelines/PipelineContextBase.cs | 439 + .../Pipelines/PipelineContextBuilder.cs | 562 ++ .../Pipelines/PipelineEnvironment.cs | 212 + .../Pipelines/PipelineException.cs | 297 + .../Pipelines/PipelineIdGenerator.cs | 114 + .../DTPipelines/Pipelines/PipelineProcess.cs | 115 + .../DTPipelines/Pipelines/PipelineResource.cs | 57 + .../Pipelines/PipelineResources.cs | 570 ++ .../DTPipelines/Pipelines/PipelineState.cs | 22 + .../Pipelines/PipelineStepsTemplate.cs | 55 + .../DTPipelines/Pipelines/PipelineTemplate.cs | 147 + .../DTPipelines/Pipelines/PipelineTrigger.cs | 22 + .../Pipelines/PipelineTriggerType.cs | 18 + .../Pipelines/PipelineUtilities.cs | 407 + .../Pipelines/PipelineValidationError.cs | 60 + .../Pipelines/PipelineValidationErrors.cs | 99 + .../DTPipelines/Pipelines/ProviderPhase.cs | 227 + .../Pipelines/PullRequestTrigger.cs | 83 + .../Pipelines/RepositoryResource.cs | 166 + .../DTPipelines/Pipelines/RepositoryTypes.cs | 15 + src/Sdk/DTPipelines/Pipelines/Resource.cs | 72 + .../DTPipelines/Pipelines/ResourceComparer.cs | 20 + .../Pipelines/ResourceProperties.cs | 256 + .../Pipelines/ResourceReference.cs | 56 + .../DTPipelines/Pipelines/ResourceStore.cs | 666 ++ .../Pipelines/Runtime/AgentJobStartedData.cs | 17 + .../Runtime/GraphExecutionContext.cs | 46 + .../Pipelines/Runtime/GraphNodeInstance.cs | 144 + .../Pipelines/Runtime/JobAttempt.cs | 14 + .../Pipelines/Runtime/JobExecutionContext.cs | 113 + .../Pipelines/Runtime/JobInstance.cs | 124 + .../Pipelines/Runtime/JobStartedEventData.cs | 30 + .../Pipelines/Runtime/PhaseAttempt.cs | 30 + 
.../Runtime/PhaseExecutionContext.cs | 116 + .../Pipelines/Runtime/PhaseInstance.cs | 61 + .../Runtime/PipelineAttemptBuilder.cs | 632 ++ .../Runtime/PipelineExecutionContext.cs | 68 + .../Pipelines/Runtime/StageAttempt.cs | 40 + .../Runtime/StageExecutionContext.cs | 63 + .../Pipelines/Runtime/StageInstance.cs | 61 + .../Pipelines/SecretStoreConfiguration.cs | 91 + .../Pipelines/SecureFileReference.cs | 38 + .../DTPipelines/Pipelines/SecureFileStore.cs | 105 + src/Sdk/DTPipelines/Pipelines/ServerTarget.cs | 92 + .../Pipelines/ServiceEndpointReference.cs | 38 + .../Pipelines/ServiceEndpointStore.cs | 118 + src/Sdk/DTPipelines/Pipelines/Stage.cs | 208 + .../DTPipelines/Pipelines/StageCondition.cs | 22 + src/Sdk/DTPipelines/Pipelines/Step.cs | 88 + .../DTPipelines/Pipelines/StepConverter.cs | 101 + .../DTPipelines/Pipelines/StrategyResult.cs | 33 + .../DTPipelines/Pipelines/TaskCondition.cs | 166 + .../Pipelines/TaskDefinitionExtensions.cs | 70 + src/Sdk/DTPipelines/Pipelines/TaskStep.cs | 177 + .../Pipelines/TaskStepDefinitionReference.cs | 51 + src/Sdk/DTPipelines/Pipelines/TaskStore.cs | 214 + .../Pipelines/TaskTemplateReference.cs | 47 + .../DTPipelines/Pipelines/TaskTemplateStep.cs | 65 + .../Pipelines/TaskTemplateStore.cs | 36 + .../Pipelines/TimelineRecordIdGenerator.cs | 189 + .../Pipelines/Validation/GraphValidator.cs | 186 + .../Pipelines/Validation/IInputValidator.cs | 18 + .../Validation/InputValidationContext.cs | 73 + .../Validation/InputValidationResult.cs | 43 + .../Pipelines/Validation/InputValidator.cs | 44 + .../Pipelines/Validation/NameValidation.cs | 59 + .../Validation/ScriptTaskValidator.cs | 198 + .../Pipelines/Validation/ValidationResult.cs | 110 + src/Sdk/DTPipelines/Pipelines/Variable.cs | 50 + .../Pipelines/VariableGroupReference.cs | 63 + .../Pipelines/VariableGroupStore.cs | 187 + .../Pipelines/VariablesDictionary.cs | 355 + .../DTPipelines/Pipelines/WorkspaceMapping.cs | 71 + .../DTPipelines/Pipelines/WorkspaceOptions.cs | 32 + src/Sdk/DTPipelines/workflow-v1.0.json | 646 ++ .../DTWebApi/WebApi/AgentJobRequestMessage.cs | 79 + .../DTWebApi/WebApi/AgentRefreshMessage.cs | 49 + src/Sdk/DTWebApi/WebApi/AuditAction.cs | 19 + .../AzureKeyVaultVariableGroupProviderData.cs | 30 + .../WebApi/AzureKeyVaultVariableValue.cs | 47 + src/Sdk/DTWebApi/WebApi/Demand.cs | 103 + src/Sdk/DTWebApi/WebApi/DemandEquals.cs | 36 + src/Sdk/DTWebApi/WebApi/DemandExists.cs | 36 + .../DTWebApi/WebApi/DemandJsonConverter.cs | 45 + .../DTWebApi/WebApi/DemandMinimumVersion.cs | 127 + src/Sdk/DTWebApi/WebApi/DeploymentGroup.cs | 83 + .../WebApi/DeploymentGroupActionFilter.cs | 31 + .../WebApi/DeploymentGroupCreateParameter.cs | 72 + .../DTWebApi/WebApi/DeploymentGroupExpands.cs | 31 + .../DTWebApi/WebApi/DeploymentGroupMetrics.cs | 62 + .../WebApi/DeploymentGroupReference.cs | 90 + .../WebApi/DeploymentGroupUpdateParameter.cs | 32 + src/Sdk/DTWebApi/WebApi/DeploymentMachine.cs | 99 + .../WebApi/DeploymentMachineExpands.cs | 19 + .../DTWebApi/WebApi/DeploymentMachineGroup.cs | 38 + .../WebApi/DeploymentMachineGroupReference.cs | 37 + .../DTWebApi/WebApi/DeploymentPoolSummary.cs | 78 + .../WebApi/DeploymentPoolSummaryExpands.cs | 31 + .../WebApi/DeploymentTargetExpands.cs | 37 + .../WebApi/DeploymentTargetUpdateParameter.cs | 41 + .../DTWebApi/WebApi/DiagnosticLogMetadata.cs | 36 + .../DTWebApi/WebApi/EnableAccessTokenType.cs | 10 + .../Environment/EnvironmentCreateParameter.cs | 35 + .../EnvironmentDeploymentExecutionRecord.cs | 192 + .../WebApi/Environment/EnvironmentExpands.cs | 25 + 
.../WebApi/Environment/EnvironmentInstance.cs | 116 + .../EnvironmentLinkedResourceReference.cs | 34 + .../Environment/EnvironmentReference.cs | 18 + .../WebApi/Environment/EnvironmentResource.cs | 51 + .../EnvironmentResourceReference.cs | 63 + .../Environment/EnvironmentResourceType.cs | 31 + .../Environment/EnvironmentUpdateParameter.cs | 34 + .../WebApi/Environment/KubernetesResource.cs | 20 + .../KubernetesResourceCreateParameters.cs | 23 + .../WebApi/Environment/VirtualMachine.cs | 41 + .../WebApi/Environment/VirtualMachineGroup.cs | 14 + .../VirtualMachineGroupCreateParameters.cs | 14 + src/Sdk/DTWebApi/WebApi/Exceptions.cs | 2461 +++++ .../WebApi/ExpressionValidationItem.cs | 13 + .../DTWebApi/WebApi/IOrchestrationProcess.cs | 172 + .../WebApi/ITaskDefinitionReference.cs | 16 + .../DTWebApi/WebApi/IdentityRefExtensions.cs | 27 + .../DTWebApi/WebApi/InputBindingContext.cs | 19 + .../DTWebApi/WebApi/InputValidationItem.cs | 23 + src/Sdk/DTWebApi/WebApi/Issue.cs | 91 + src/Sdk/DTWebApi/WebApi/IssueType.cs | 14 + src/Sdk/DTWebApi/WebApi/JobCancelMessage.cs | 47 + src/Sdk/DTWebApi/WebApi/JobEnvironment.cs | 294 + src/Sdk/DTWebApi/WebApi/JobEvent.cs | 355 + .../DTWebApi/WebApi/JobExecutionModeTypes.cs | 12 + src/Sdk/DTWebApi/WebApi/JobOption.cs | 87 + src/Sdk/DTWebApi/WebApi/JobRequestMessage.cs | 86 + .../WebApi/JobRequestMessageJsonConverter.cs | 89 + .../DTWebApi/WebApi/JobRequestMessageTypes.cs | 15 + .../WebApi/MachineGroupActionFilter.cs | 19 + .../WebApi/MarketplacePurchasedLicense.cs | 35 + src/Sdk/DTWebApi/WebApi/MaskHint.cs | 54 + src/Sdk/DTWebApi/WebApi/MaskType.cs | 14 + src/Sdk/DTWebApi/WebApi/MetricsColumn.cs | 100 + src/Sdk/DTWebApi/WebApi/MetricsRow.cs | 71 + src/Sdk/DTWebApi/WebApi/PackageMetadata.cs | 92 + src/Sdk/DTWebApi/WebApi/PackageVersion.cs | 108 + src/Sdk/DTWebApi/WebApi/PlanEnvironment.cs | 156 + src/Sdk/DTWebApi/WebApi/PlanGroupStatus.cs | 19 + src/Sdk/DTWebApi/WebApi/PlanTemplateType.cs | 19 + src/Sdk/DTWebApi/WebApi/ProjectReference.cs | 21 + .../WebApi/PublishTaskGroupMetadata.cs | 20 + src/Sdk/DTWebApi/WebApi/ResourceLimit.cs | 96 + src/Sdk/DTWebApi/WebApi/ResourceUsage.cs | 58 + src/Sdk/DTWebApi/WebApi/SecureFile.cs | 109 + .../DTWebApi/WebApi/SecureFileActionFilter.cs | 19 + .../WebApi/ServerTaskRequestMessage.cs | 43 + .../ServerTaskSectionExecutionOutput.cs | 14 + .../AadLoginPromptOption.cs | 46 + .../AadOauthTokenRequest.cs | 21 + .../AadOauthTokenResult.cs | 15 + .../AzureKeyVaultPermission.cs | 16 + .../AzureManagementGroup.cs | 40 + .../AzureManagementGroupQueryResult.cs | 26 + .../ServiceEndpointLegacy/AzurePermission.cs | 130 + .../AzurePermissionResourceProviders.cs | 12 + .../AzureResourcePermission.cs | 16 + .../AzureRoleAssignmentPermission.cs | 17 + .../AzureSubscription.cs | 24 + .../AzureSubscriptionQueryResult.cs | 17 + .../DataSourceBinding.cs | 29 + .../EndpointAuthorization.cs | 120 + .../SerializationHelper.cs | 107 + .../ServiceEndpointLegacy/ServiceEndpoint.cs | 296 + .../ServiceEndpointTypes.cs | 91 + src/Sdk/DTWebApi/WebApi/TaskAgent.cs | 223 + .../DTWebApi/WebApi/TaskAgentAuthorization.cs | 69 + src/Sdk/DTWebApi/WebApi/TaskAgentCloud.cs | 156 + .../DTWebApi/WebApi/TaskAgentCloudRequest.cs | 116 + src/Sdk/DTWebApi/WebApi/TaskAgentCloudType.cs | 51 + .../DTWebApi/WebApi/TaskAgentDelaySource.cs | 48 + .../DTWebApi/WebApi/TaskAgentHttpClient.cs | 722 ++ .../WebApi/TaskAgentHttpClientCompatBase.cs | 372 + src/Sdk/DTWebApi/WebApi/TaskAgentJob.cs | 103 + .../DTWebApi/WebApi/TaskAgentJobRequest.cs | 433 + 
.../WebApi/TaskAgentJobResultFilter.cs | 37 + src/Sdk/DTWebApi/WebApi/TaskAgentJobStep.cs | 90 + src/Sdk/DTWebApi/WebApi/TaskAgentJobTask.cs | 30 + .../DTWebApi/WebApi/TaskAgentJobVariable.cs | 30 + src/Sdk/DTWebApi/WebApi/TaskAgentMessage.cs | 60 + src/Sdk/DTWebApi/WebApi/TaskAgentPool.cs | 201 + .../WebApi/TaskAgentPoolActionFilter.cs | 22 + .../TaskAgentPoolMaintenanceDefinition.cs | 168 + .../WebApi/TaskAgentPoolMaintenanceJob.cs | 169 + .../TaskAgentPoolMaintenanceJobResult.cs | 18 + .../TaskAgentPoolMaintenanceJobStatus.cs | 21 + .../TaskAgentPoolMaintenanceJobTargetAgent.cs | 41 + .../WebApi/TaskAgentPoolMaintenanceOptions.cs | 33 + ...TaskAgentPoolMaintenanceRetentionPolicy.cs | 47 + .../TaskAgentPoolMaintenanceSchedule.cs | 58 + .../TaskAgentPoolMaintenanceScheduleDays.cs | 62 + .../DTWebApi/WebApi/TaskAgentPoolReference.cs | 108 + src/Sdk/DTWebApi/WebApi/TaskAgentPoolType.cs | 23 + .../TaskAgentProvisiongStateConstants.cs | 13 + src/Sdk/DTWebApi/WebApi/TaskAgentPublicKey.cs | 76 + src/Sdk/DTWebApi/WebApi/TaskAgentQueue.cs | 97 + .../WebApi/TaskAgentQueueActionFilter.cs | 22 + src/Sdk/DTWebApi/WebApi/TaskAgentReference.cs | 146 + src/Sdk/DTWebApi/WebApi/TaskAgentSession.cs | 121 + .../DTWebApi/WebApi/TaskAgentSessionKey.cs | 34 + src/Sdk/DTWebApi/WebApi/TaskAgentStatus.cs | 14 + .../DTWebApi/WebApi/TaskAgentStatusFilter.cs | 31 + src/Sdk/DTWebApi/WebApi/TaskAgentUpdate.cs | 115 + .../DTWebApi/WebApi/TaskAgentUpdateReason.cs | 178 + src/Sdk/DTWebApi/WebApi/TaskAttachment.cs | 105 + src/Sdk/DTWebApi/WebApi/TaskDefinition.cs | 552 ++ .../DTWebApi/WebApi/TaskDefinitionEndpoint.cs | 87 + .../WebApi/TaskDefinitionReference.cs | 78 + src/Sdk/DTWebApi/WebApi/TaskDefinitionType.cs | 12 + src/Sdk/DTWebApi/WebApi/TaskExecution.cs | 68 + src/Sdk/DTWebApi/WebApi/TaskGroup.cs | 173 + .../WebApi/TaskGroupCreateParameter.cs | 140 + .../DTWebApi/WebApi/TaskGroupDefinition.cs | 96 + src/Sdk/DTWebApi/WebApi/TaskGroupExpands.cs | 11 + .../DTWebApi/WebApi/TaskGroupQueryOrder.cs | 30 + src/Sdk/DTWebApi/WebApi/TaskGroupRevision.cs | 34 + src/Sdk/DTWebApi/WebApi/TaskGroupStep.cs | 156 + .../WebApi/TaskGroupUpdateParameter.cs | 158 + src/Sdk/DTWebApi/WebApi/TaskHttpClient.cs | 147 + .../DTWebApi/WebApi/TaskHubLicenseDetails.cs | 78 + .../DTWebApi/WebApi/TaskInputDefinition.cs | 35 + src/Sdk/DTWebApi/WebApi/TaskInputType.cs | 15 + src/Sdk/DTWebApi/WebApi/TaskInstance.cs | 130 + src/Sdk/DTWebApi/WebApi/TaskLog.cs | 55 + src/Sdk/DTWebApi/WebApi/TaskLogReference.cs | 23 + .../WebApi/TaskOrchestrationContainer.cs | 116 + .../DTWebApi/WebApi/TaskOrchestrationItem.cs | 24 + .../TaskOrchestrationItemJsonConverter.cs | 100 + .../WebApi/TaskOrchestrationItemType.cs | 14 + .../DTWebApi/WebApi/TaskOrchestrationJob.cs | 166 + .../DTWebApi/WebApi/TaskOrchestrationOwner.cs | 60 + .../DTWebApi/WebApi/TaskOrchestrationPlan.cs | 145 + ...TaskOrchestrationPlanGroupsQueueMetrics.cs | 23 + .../WebApi/TaskOrchestrationPlanReference.cs | 86 + .../WebApi/TaskOrchestrationPlanState.cs | 20 + .../WebApi/TaskOrchestrationQueuedPlan.cs | 72 + .../TaskOrchestrationQueuedPlanGroup.cs | 61 + src/Sdk/DTWebApi/WebApi/TaskOutputVariable.cs | 38 + src/Sdk/DTWebApi/WebApi/TaskReference.cs | 87 + src/Sdk/DTWebApi/WebApi/TaskResourceIds.cs | 264 + src/Sdk/DTWebApi/WebApi/TaskResult.cs | 26 + .../DTWebApi/WebApi/TaskRunsOnConstants.cs | 23 + .../DTWebApi/WebApi/TaskSourceDefinition.cs | 35 + src/Sdk/DTWebApi/WebApi/TaskVersion.cs | 182 + src/Sdk/DTWebApi/WebApi/TaskVersionSpec.cs | 239 + src/Sdk/DTWebApi/WebApi/Timeline.cs | 77 + 
src/Sdk/DTWebApi/WebApi/TimelineAttempt.cs | 49 + src/Sdk/DTWebApi/WebApi/TimelineRecord.cs | 307 + .../WebApi/TimelineRecordFeedLinesWrapper.cs | 35 + .../DTWebApi/WebApi/TimelineRecordState.cs | 17 + src/Sdk/DTWebApi/WebApi/TimelineReference.cs | 34 + src/Sdk/DTWebApi/WebApi/ValidationItem.cs | 60 + .../WebApi/ValidationItemJsonConverter.cs | 102 + src/Sdk/DTWebApi/WebApi/ValidationRequest.cs | 26 + src/Sdk/DTWebApi/WebApi/ValidationTypes.cs | 12 + src/Sdk/DTWebApi/WebApi/VariableGroup.cs | 244 + .../WebApi/VariableGroupActionFilter.cs | 19 + .../WebApi/VariableGroupParameters.cs | 159 + .../WebApi/VariableGroupProviderData.cs | 13 + .../WebApi/VariableGroupQueryOrder.cs | 29 + src/Sdk/DTWebApi/WebApi/VariableGroupType.cs | 12 + .../DTWebApi/WebApi/VariableGroupUtility.cs | 318 + src/Sdk/DTWebApi/WebApi/VariableUtility.cs | 321 + src/Sdk/DTWebApi/WebApi/VariableValue.cs | 44 + src/Sdk/DTWebApi/WebApi/VersionParser.cs | 45 + .../WellKnownDistributedTaskVariables.cs | 53 + .../DTWebApi/WebApi/WellKnownPackageTypes.cs | 9 + .../WebApi/WellKnownServiceEndpointNames.cs | 9 + src/Sdk/InternalsVisibleTo.cs | 9 + src/Sdk/Namespaces.cs | 324 + src/Sdk/Resources/CommonResources.g.cs | 608 ++ src/Sdk/Resources/ContentResources.g.cs | 20 + src/Sdk/Resources/ExpressionResources.g.cs | 116 + src/Sdk/Resources/FileContainerResources.g.cs | 104 + src/Sdk/Resources/GraphResources.g.cs | 92 + src/Sdk/Resources/IdentityResources.g.cs | 400 + src/Sdk/Resources/JwtResources.g.cs | 146 + src/Sdk/Resources/LocationResources.g.cs | 20 + src/Sdk/Resources/PatchResources.g.cs | 122 + src/Sdk/Resources/PipelineStrings.g.cs | 500 + src/Sdk/Resources/SecurityResources.g.cs | 20 + src/Sdk/Resources/TemplateStrings.g.cs | 140 + src/Sdk/Resources/WebApiResources.g.cs | 392 + src/Sdk/Sdk.csproj | 38 + src/Sdk/WebApi/WebApi/Attributes.cs | 177 + src/Sdk/WebApi/WebApi/Constants.cs | 101 + .../Contracts/Common/SocialDescriptor.cs | 345 + .../Contracts/Common/SubjectDescriptor.cs | 519 + .../AccessTokenResult.cs | 32 + .../AuthorizationGrant.cs | 26 + .../AuthorizationGrantJsonConverter.cs | 50 + .../DelegatedAuthorization/GrantType.cs | 11 + .../JwtBearerAuthorizationGrant.cs | 21 + .../RefreshTokenGrant.cs | 20 + .../DelegatedAuthorization/TokenError.cs | 39 + .../Contracts/FileContainer/Enumerations.cs | 68 + .../Contracts/FileContainer/FileContainer.cs | 118 + .../FileContainer/FileContainerItem.cs | 174 + .../Contracts/FormInput/InputDataType.cs | 47 + .../Contracts/FormInput/InputDescriptor.cs | 130 + .../WebApi/Contracts/FormInput/InputMode.cs | 53 + .../Contracts/FormInput/InputValidation.cs | 81 + .../WebApi/Contracts/FormInput/InputValues.cs | 166 + .../Contracts/Graph/Client/GraphGroup.cs | 171 + .../Graph/Client/GraphGroupCreationContext.cs | 105 + .../GraphGroupCreationContextJsonConverter.cs | 46 + .../Contracts/Graph/Client/GraphMember.cs | 58 + .../Contracts/Graph/Client/GraphScope.cs | 138 + .../Contracts/Graph/Client/GraphSubject.cs | 78 + .../Graph/Client/GraphSubjectBase.cs | 80 + .../Graph/Client/GraphSubjectJsonConverter.cs | 42 + .../Graph/Client/GraphSystemSubject.cs | 30 + .../Contracts/Graph/Client/GraphUser.cs | 107 + .../Graph/Client/GraphUserCreationContext.cs | 81 + .../GraphUserCreationContextJsonConverter.cs | 46 + .../Graph/Client/GraphUserUpdateContext.cs | 45 + .../GraphUserUpdateContextJsonConverter.cs | 33 + .../WebApi/Contracts/Graph/Constants.cs | 198 + .../Contracts/Identity/ChangedIdentities.cs | 119 + .../Contracts/Identity/CreateGroupsInfo.cs | 26 + 
.../Contracts/Identity/CreateScopeInfo.cs | 51 + .../Identity/FrameworkIdentityInfo.cs | 20 + .../Identity/FrameworkIdentityType.cs | 10 + .../Contracts/Identity/GroupMembership.cs | 93 + .../Identity/IReadOnlyVssIdentity.cs | 23 + .../WebApi/Contracts/Identity/IVssIdentity.cs | 15 + .../WebApi/Contracts/Identity/Identity.cs | 557 ++ .../Contracts/Identity/IdentityBatchInfo.cs | 76 + .../Contracts/Identity/IdentityCollections.cs | 45 + .../Contracts/Identity/IdentityDescriptor.cs | 579 ++ .../Identity/IdentityEnumerations.cs | 196 + .../Contracts/Identity/IdentityMetaType.cs | 12 + .../Contracts/Identity/IdentityScope.cs | 146 + .../WebApi/Contracts/Identity/IdentitySelf.cs | 105 + .../Contracts/Identity/IdentitySnapshot.cs | 81 + .../Contracts/Identity/IdentityUpdateData.cs | 18 + .../Identity/RequestHeadersContext.cs | 64 + .../Contracts/Identity/SequenceContext.cs | 102 + .../Contracts/Identity/SwapIdentityInfo.cs | 25 + .../WebApi/Contracts/Identity/TenantInfo.cs | 24 + .../Contracts/Licensing/AccountLicense.cs | 134 + .../WebApi/Contracts/Licensing/Definitions.cs | 96 + .../WebApi/Contracts/Licensing/License.cs | 403 + .../Contracts/Licensing/LicenseComparer.cs | 98 + .../Licensing/LicenseJsonConverter.cs | 66 + .../Licensing/LicenseTypeConverter.cs | 85 + .../WebApi/Contracts/Licensing/MsdnLicense.cs | 143 + .../Contracts/Location/AccessMapping.cs | 151 + .../Contracts/Location/ConnectionData.cs | 397 + .../WebApi/Contracts/Location/Constants.cs | 107 + .../Contracts/Location/LocationMapping.cs | 115 + .../Contracts/Location/ResourceAreaInfo.cs | 31 + .../Contracts/Location/ServiceDefinition.cs | 649 ++ src/Sdk/WebApi/WebApi/Contracts/PagedList.cs | 25 + .../Contracts/Patch/AddPatchOperation.cs | 94 + .../WebApi/Contracts/Patch/Exceptions.cs | 64 + .../WebApi/Contracts/Patch/IPatchDocument.cs | 22 + .../WebApi/Contracts/Patch/IPatchOperation.cs | 44 + .../Contracts/Patch/IPatchOperationApplied.cs | 15 + .../Patch/IPatchOperationApplying.cs | 15 + .../Contracts/Patch/Json/JsonPatchDocument.cs | 13 + .../Patch/Json/JsonPatchOperation.cs | 38 + .../Patch/ObjectDictionaryConverter.cs | 36 + .../WebApi/Contracts/Patch/Operation.cs | 14 + .../WebApi/Contracts/Patch/PatchOperation.cs | 400 + .../Patch/PatchOperationAppliedEventArgs.cs | 26 + .../Patch/PatchOperationApplyingEventArgs.cs | 26 + .../Contracts/Patch/RemovePatchOperation.cs | 79 + .../Contracts/Patch/ReplacePatchOperation.cs | 90 + .../Contracts/Patch/TestPatchOperation.cs | 115 + .../Contracts/Profile/AttributeDescriptor.cs | 139 + .../Contracts/Profile/AttributesContainer.cs | 65 + .../Profile/AttributesQueryContext.cs | 135 + .../WebApi/WebApi/Contracts/Profile/Avatar.cs | 48 + .../Contracts/Profile/CoreProfileAttribute.cs | 12 + .../WebApi/Contracts/Profile/ITimeStamped.cs | 11 + .../WebApi/Contracts/Profile/IVersioned.cs | 10 + .../WebApi/Contracts/Profile/Profile.cs | 234 + .../Contracts/Profile/ProfileAttribute.cs | 12 + .../Contracts/Profile/ProfileAttributeBase.cs | 41 + .../Contracts/Profile/ProfileQueryContext.cs | 51 + .../WebApi/Contracts/PropertiesCollection.cs | 462 + .../Contracts/ReferenceLink/ReferenceLink.cs | 54 + .../Contracts/ReferenceLink/ReferenceLinks.cs | 304 + .../Contracts/Users/UpdateUserParameters.cs | 181 + src/Sdk/WebApi/WebApi/Contracts/Users/User.cs | 187 + .../WebApi/Exceptions/CommonRestExceptions.cs | 76 + .../Exceptions/FileContainerExceptions.cs | 360 + .../WebApi/Exceptions/GraphExceptions.cs | 469 + .../WebApi/Exceptions/IdentityExceptions.cs | 1533 +++ 
.../WebApi/Exceptions/LocationExceptions.cs | 155 + .../WebApi/Exceptions/SecurityExceptions.cs | 158 + .../Exceptions/VssApiResourceExceptions.cs | 186 + .../WebApi/WebApi/HttpClients/Constants.cs | 123 + .../HttpClients/FileContainerHttpClient.cs | 703 ++ .../WebApi/HttpClients/IdentityHttpClient.cs | 1167 +++ .../WebApi/HttpClients/LocationHttpClient.cs | 184 + src/Sdk/WebApi/WebApi/IdentityRef.cs | 124 + src/Sdk/WebApi/WebApi/JsonUtility.cs | 258 + .../WebApi/Jwt/IJsonWebTokenHeaderProvider.cs | 10 + src/Sdk/WebApi/WebApi/Jwt/JsonWebToken.cs | 687 ++ .../WebApi/Jwt/JsonWebTokenConstants.cs | 36 + .../WebApi/Jwt/JsonWebTokenExceptions.cs | 230 + .../WebApi/Jwt/JsonWebTokenUtilities.cs | 321 + .../Jwt/JsonWebTokenValidationParameters.cs | 92 + .../WebApi/Jwt/UnixEpochDateTimeConverter.cs | 24 + src/Sdk/WebApi/WebApi/Location/Interfaces.cs | 592 ++ .../WebApi/Location/LocationCacheManager.cs | 941 ++ .../WebApi/Location/LocationServerMapCache.cs | 424 + .../WebApi/WebApi/Location/LocationService.cs | 281 + .../WebApi/Location/LocationXmlOperator.cs | 601 ++ .../WebApi/Location/ServerDataProvider.cs | 816 ++ .../OAuth/IVssOAuthTokenParameterProvider.cs | 18 + .../WebApi/OAuth/VssOAuthAccessToken.cs | 84 + .../OAuth/VssOAuthAccessTokenCredential.cs | 85 + .../WebApi/OAuth/VssOAuthClientCredential.cs | 78 + .../OAuth/VssOAuthClientCredentialType.cs | 18 + .../OAuth/VssOAuthClientCredentialsGrant.cs | 24 + .../WebApi/WebApi/OAuth/VssOAuthConstants.cs | 82 + .../WebApi/WebApi/OAuth/VssOAuthCredential.cs | 133 + .../WebApi/WebApi/OAuth/VssOAuthExceptions.cs | 94 + src/Sdk/WebApi/WebApi/OAuth/VssOAuthGrant.cs | 57 + .../WebApi/WebApi/OAuth/VssOAuthGrantType.cs | 28 + .../OAuth/VssOAuthJwtBearerAssertion.cs | 149 + .../VssOAuthJwtBearerClientCredential.cs | 62 + .../WebApi/OAuth/VssOAuthTokenHttpClient.cs | 216 + .../WebApi/OAuth/VssOAuthTokenParameters.cs | 115 + .../WebApi/OAuth/VssOAuthTokenProvider.cs | 214 + .../WebApi/OAuth/VssOAuthTokenRequest.cs | 232 + .../WebApi/OAuth/VssOAuthTokenResponse.cs | 89 + .../Profile/ProfileArgumentValidation.cs | 40 + .../ProxyAuthenticationRequiredException.cs | 29 + .../WebApi/PublicAccessJsonConverter.cs | 71 + src/Sdk/WebApi/WebApi/ResourceLocationIds.cs | 1511 +++ src/Sdk/WebApi/WebApi/ServiceEvent.cs | 91 + src/Sdk/WebApi/WebApi/TaskExtensions.cs | 65 + src/Sdk/WebApi/WebApi/Utilities/AsyncLock.cs | 57 + .../WebApi/Utilities/BaseSecuredObject.cs | 48 + .../Utilities/ClientGeneratorAttributes.cs | 59 + .../WebApi/WebApi/Utilities/ISecuredObject.cs | 42 + .../WebApi/Utilities/UserAgentUtility.cs | 217 + .../XmlSerializableDataContractExtensions.cs | 355 + .../WebApi/WebApi/VssApiResourceLocation.cs | 197 + .../VssApiResourceLocationCollection.cs | 143 + .../WebApi/WebApi/VssApiResourceVersion.cs | 196 + .../WebApi/VssApiResourceVersionExtensions.cs | 93 + ...sCamelCasePropertyNamesContractResolver.cs | 39 + .../WebApi/VssClientHttpRequestSettings.cs | 161 + src/Sdk/WebApi/WebApi/VssClientSettings.cs | 126 + src/Sdk/WebApi/WebApi/VssConnectMode.cs | 12 + src/Sdk/WebApi/WebApi/VssConnection.cs | 905 ++ .../WebApi/VssConnectionParameterKeys.cs | 25 + src/Sdk/WebApi/WebApi/VssEventId.cs | 21 + src/Sdk/WebApi/WebApi/VssHttpClientBase.cs | 1318 +++ src/Sdk/WebApi/WebApi/VssHttpUriUtility.cs | 229 + .../WebApi/WebApi/VssJsonCollectionWrapper.cs | 119 + .../WebApi/WebApi/VssJsonCreationConverter.cs | 39 + .../WebApi/VssJsonMediaTypeFormatter.cs | 206 + src/Sdk/WebApi/WebApi/VssRequestTimerTrace.cs | 181 + src/Sdk/WebApi/WebApi/VssResponseContext.cs | 74 + 
.../WebApi/WebApi/VssSecureJsonConverter.cs | 59 + .../WebApi/VssServiceResponseException.cs | 36 + .../WebApi/WebApi/VssSigningCredentials.cs | 490 + src/Sdk/WebApi/WebApi/WrappedException.cs | 621 ++ src/Sdk/nuget.config | 9 + src/Sync-Sdk.ps1 | 432 + src/Test/L0/CommandLineParserL0.cs | 127 + src/Test/L0/ConstantGenerationL0.cs | 29 + src/Test/L0/Container/ContainerInfoL0.cs | 38 + src/Test/L0/Container/DockerUtilL0.cs | 130 + src/Test/L0/DotnetsdkDownloadScriptL0.cs | 58 + src/Test/L0/ExtensionManagerL0.cs | 62 + src/Test/L0/HostContextL0.cs | 84 + src/Test/L0/Listener/AgentL0.cs | 574 ++ src/Test/L0/Listener/CommandSettingsL0.cs | 784 ++ .../AgentCapabilitiesProviderTestL0.cs | 79 + .../Configuration/AgentCredentialL0.cs | 26 + .../Configuration/ArgumentValidatorTestsL0.cs | 60 + .../Configuration/ConfigurationManagerL0.cs | 220 + .../NativeWindowsServiceHelperL0.cs | 55 + .../Configuration/PromptManagerTestsL0.cs | 228 + src/Test/L0/Listener/JobDispatcherL0.cs | 502 + src/Test/L0/Listener/MessageListenerL0.cs | 214 + src/Test/L0/PagingLoggerL0.cs | 131 + src/Test/L0/ProcessExtensionL0.cs | 70 + src/Test/L0/ProcessInvokerL0.cs | 350 + src/Test/L0/ProxyConfigL0.cs | 116 + src/Test/L0/ServiceInterfacesL0.cs | 115 + src/Test/L0/TestHostContext.cs | 373 + src/Test/L0/TestUtil.cs | 39 + src/Test/L0/Util/ArgUtilL0.cs | 148 + src/Test/L0/Util/IOUtilL0.cs | 957 ++ src/Test/L0/Util/StringUtilL0.cs | 190 + src/Test/L0/Util/TaskResultUtilL0.cs | 206 + src/Test/L0/Util/UrlUtilL0.cs | 65 + src/Test/L0/Util/VssUtilL0.cs | 56 + src/Test/L0/Util/WhichUtilL0.cs | 74 + src/Test/L0/Worker/ActionCommandL0.cs | 211 + src/Test/L0/Worker/ActionCommandManagerL0.cs | 150 + src/Test/L0/Worker/ActionManagerL0.cs | 1759 ++++ src/Test/L0/Worker/ActionManifestManagerL0.cs | 497 + src/Test/L0/Worker/ActionRunnerL0.cs | 359 + src/Test/L0/Worker/ExecutionContextL0.cs | 259 + src/Test/L0/Worker/ExpressionManagerL0.cs | 186 + src/Test/L0/Worker/IssueMatcherL0.cs | 795 ++ src/Test/L0/Worker/JobExtensionL0.cs | 386 + src/Test/L0/Worker/JobRunnerL0.cs | 223 + src/Test/L0/Worker/OutputManagerL0.cs | 728 ++ .../L0/Worker/PipelineDirectoryManagerL0.cs | 229 + src/Test/L0/Worker/StepsRunnerL0.cs | 448 + src/Test/L0/Worker/TaskCommandExtensionL0.cs | 181 + src/Test/L0/Worker/TrackingManagerL0.cs | 129 + src/Test/L0/Worker/VariablesL0.cs | 193 + src/Test/L0/Worker/WorkerL0.cs | 308 + src/Test/Properties.cs | 3 + src/Test/Test.csproj | 70 + src/Test/TestData/dockerfileaction.yml | 25 + .../dockerfileaction_arg_env_expression.yml | 25 + .../TestData/dockerfileaction_cleanup.yml | 27 + ...erfileaction_noargs_noenv_noentrypoint.yml | 19 + .../TestData/dockerfilerelativeaction.yml | 25 + src/Test/TestData/dockerhubaction.yml | 25 + src/Test/TestData/nodeaction.yml | 20 + src/Test/TestData/nodeaction_cleanup.yml | 22 + src/Test/TestData/noderelativeaction.yml | 19 + src/Test/TestData/pluginaction.yml | 18 + src/Trim-Sdk.ps1 | 99 + src/dev.cmd | 33 + src/dev.sh | 250 + src/dir.proj | 87 + src/global.json | 5 + src/runnerversion | 1 + windows.template.yml | 29 + 1255 files changed, 198670 insertions(+) create mode 100644 .gitattributes create mode 100644 .github/ISSUE_TEMPLATE.md create mode 100644 .github/workflows/build.yml create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 README.md create mode 100644 assets.json create mode 100644 azure-pipelines-release.yml create mode 100644 azure-pipelines.yml create mode 100644 docs/contribute.md create mode 100644 docs/res/apple_med.png create mode 100644 
docs/res/apple_sm.png create mode 100644 docs/res/github-graph.png create mode 100644 docs/res/linux_med.png create mode 100644 docs/res/linux_sm.png create mode 100644 docs/res/redhat_med.png create mode 100644 docs/res/redhat_sm.png create mode 100644 docs/res/ubuntu_med.png create mode 100644 docs/res/ubuntu_sm.png create mode 100644 docs/res/win_med.png create mode 100644 docs/res/win_sm.png create mode 100644 docs/start/envlinux.md create mode 100644 docs/start/envosx.md create mode 100644 docs/start/envwin.md create mode 100644 images/arm/Dockerfile create mode 100644 images/centos6/Dockerfile create mode 100644 nonwindows.template.yml create mode 100644 releaseNote.md create mode 100644 src/Misc/dotnet-install.ps1 create mode 100755 src/Misc/dotnet-install.sh create mode 100755 src/Misc/externals.sh create mode 100644 src/Misc/layoutbin/RunnerService.js create mode 100644 src/Misc/layoutbin/actions.runner.plist.template create mode 100644 src/Misc/layoutbin/actions.runner.service.template create mode 100644 src/Misc/layoutbin/darwin.svc.sh.template create mode 100755 src/Misc/layoutbin/installdependencies.sh create mode 100755 src/Misc/layoutbin/runsvc.sh create mode 100644 src/Misc/layoutbin/systemd.svc.sh.template create mode 100644 src/Misc/layoutbin/update.cmd.template create mode 100644 src/Misc/layoutbin/update.sh.template create mode 100644 src/Misc/layoutroot/config.cmd create mode 100755 src/Misc/layoutroot/config.sh create mode 100755 src/Misc/layoutroot/env.sh create mode 100644 src/Misc/layoutroot/run.cmd create mode 100755 src/Misc/layoutroot/run.sh create mode 100644 src/NuGet.Config create mode 100644 src/Runner.Common/ActionCommand.cs create mode 100644 src/Runner.Common/ActionResult.cs create mode 100644 src/Runner.Common/AsyncManualResetEvent.cs create mode 100644 src/Runner.Common/Capabilities/CapabilitiesManager.cs create mode 100644 src/Runner.Common/Capabilities/RunnerCapabilitiesProvider.cs create mode 100644 src/Runner.Common/CommandLineParser.cs create mode 100644 src/Runner.Common/ConfigurationStore.cs create mode 100644 src/Runner.Common/Constants.cs create mode 100644 src/Runner.Common/CredentialData.cs create mode 100644 src/Runner.Common/Exceptions.cs create mode 100644 src/Runner.Common/ExtensionManager.cs create mode 100644 src/Runner.Common/Extensions.cs create mode 100644 src/Runner.Common/HostContext.cs create mode 100644 src/Runner.Common/HostTraceListener.cs create mode 100644 src/Runner.Common/IExtension.cs create mode 100644 src/Runner.Common/JobNotification.cs create mode 100644 src/Runner.Common/JobServer.cs create mode 100644 src/Runner.Common/JobServerQueue.cs create mode 100644 src/Runner.Common/LocationServer.cs create mode 100644 src/Runner.Common/Logging.cs create mode 100644 src/Runner.Common/ProcessChannel.cs create mode 100644 src/Runner.Common/ProcessExtensions.cs create mode 100644 src/Runner.Common/ProcessInvoker.cs create mode 100644 src/Runner.Common/Runner.Common.csproj create mode 100644 src/Runner.Common/RunnerCertificateManager.cs create mode 100644 src/Runner.Common/RunnerCredentialStore.cs create mode 100644 src/Runner.Common/RunnerServer.cs create mode 100644 src/Runner.Common/RunnerService.cs create mode 100644 src/Runner.Common/RunnerWebProxy.cs create mode 100644 src/Runner.Common/StreamString.cs create mode 100644 src/Runner.Common/Terminal.cs create mode 100644 src/Runner.Common/ThrottlingReportHandler.cs create mode 100644 src/Runner.Common/TraceManager.cs create mode 100644 src/Runner.Common/TraceSetting.cs create 
mode 100644 src/Runner.Common/Tracing.cs create mode 100644 src/Runner.Common/Util/EnumUtil.cs create mode 100644 src/Runner.Common/Util/PlanUtil.cs create mode 100644 src/Runner.Common/Util/TaskResultUtil.cs create mode 100644 src/Runner.Common/Util/UnixUtil.cs create mode 100644 src/Runner.Common/Util/VarUtil.cs create mode 100644 src/Runner.Listener/Agent.cs create mode 100644 src/Runner.Listener/CommandSettings.cs create mode 100644 src/Runner.Listener/Configuration/ConfigurationManager.cs create mode 100644 src/Runner.Listener/Configuration/CredentialManager.cs create mode 100644 src/Runner.Listener/Configuration/CredentialProvider.cs create mode 100644 src/Runner.Listener/Configuration/IRSAKeyManager.cs create mode 100644 src/Runner.Listener/Configuration/NativeWindowsServiceHelper.cs create mode 100644 src/Runner.Listener/Configuration/OAuthCredential.cs create mode 100644 src/Runner.Listener/Configuration/OsxServiceControlManager.cs create mode 100644 src/Runner.Listener/Configuration/PromptManager.cs create mode 100644 src/Runner.Listener/Configuration/RSAEncryptedFileKeyManager.cs create mode 100644 src/Runner.Listener/Configuration/RSAFileKeyManager.cs create mode 100644 src/Runner.Listener/Configuration/ServiceControlManager.cs create mode 100644 src/Runner.Listener/Configuration/SystemdControlManager.cs create mode 100644 src/Runner.Listener/Configuration/Validators.cs create mode 100644 src/Runner.Listener/Configuration/WindowsServiceControlManager.cs create mode 100644 src/Runner.Listener/JobDispatcher.cs create mode 100644 src/Runner.Listener/MessageListener.cs create mode 100644 src/Runner.Listener/Program.cs create mode 100644 src/Runner.Listener/Runner.Listener.csproj create mode 100644 src/Runner.Listener/SelfUpdater.cs create mode 100644 src/Runner.PluginHost/Program.cs create mode 100644 src/Runner.PluginHost/Runner.PluginHost.csproj create mode 100644 src/Runner.Plugins/Artifact/BuildServer.cs create mode 100644 src/Runner.Plugins/Artifact/DownloadArtifact.cs create mode 100644 src/Runner.Plugins/Artifact/FileContainerServer.cs create mode 100644 src/Runner.Plugins/Artifact/PublishArtifact.cs create mode 100644 src/Runner.Plugins/Repository/GitCliManager.cs create mode 100644 src/Runner.Plugins/Repository/v1.0/GitSourceProvider.cs create mode 100644 src/Runner.Plugins/Repository/v1.0/RepositoryPlugin.cs create mode 100644 src/Runner.Plugins/Repository/v1.1/GitSourceProvider.cs create mode 100644 src/Runner.Plugins/Repository/v1.1/RepositoryPlugin.cs create mode 100644 src/Runner.Plugins/Runner.Plugins.csproj create mode 100644 src/Runner.Sdk/ActionPlugin.cs create mode 100644 src/Runner.Sdk/ITraceWriter.cs create mode 100644 src/Runner.Sdk/ProcessInvoker.cs create mode 100644 src/Runner.Sdk/Runner.Sdk.csproj create mode 100644 src/Runner.Sdk/RunnerClientCertificateManager.cs create mode 100644 src/Runner.Sdk/RunnerWebProxyCore.cs create mode 100644 src/Runner.Sdk/Util/ArgUtil.cs create mode 100644 src/Runner.Sdk/Util/IOUtil.cs create mode 100644 src/Runner.Sdk/Util/PathUtil.cs create mode 100644 src/Runner.Sdk/Util/StringUtil.cs create mode 100644 src/Runner.Sdk/Util/UrlUtil.cs create mode 100644 src/Runner.Sdk/Util/VssUtil.cs create mode 100644 src/Runner.Sdk/Util/WhichUtil.cs create mode 100644 src/Runner.Service/Windows/App.config create mode 100644 src/Runner.Service/Windows/FinalPublicKey.snk create mode 100644 src/Runner.Service/Windows/Program.cs create mode 100644 src/Runner.Service/Windows/Properties/AssemblyInfo.cs create mode 100644 
src/Runner.Service/Windows/Resource.Designer.cs create mode 100644 src/Runner.Service/Windows/Resource.resx create mode 100644 src/Runner.Service/Windows/RunnerService.Designer.cs create mode 100644 src/Runner.Service/Windows/RunnerService.cs create mode 100644 src/Runner.Service/Windows/RunnerService.csproj create mode 100644 src/Runner.Worker/ActionCommandManager.cs create mode 100644 src/Runner.Worker/ActionManager.cs create mode 100644 src/Runner.Worker/ActionManifestManager.cs create mode 100644 src/Runner.Worker/ActionRunner.cs create mode 100644 src/Runner.Worker/Container/ContainerInfo.cs create mode 100644 src/Runner.Worker/Container/DockerCommandManager.cs create mode 100644 src/Runner.Worker/Container/DockerUtil.cs create mode 100644 src/Runner.Worker/ContainerOperationProvider.cs create mode 100644 src/Runner.Worker/DiagnosticLogManager.cs create mode 100644 src/Runner.Worker/ExecutionContext.cs create mode 100644 src/Runner.Worker/ExpressionManager.cs create mode 100644 src/Runner.Worker/GitHubContext.cs create mode 100644 src/Runner.Worker/Handlers/ContainerActionHandler.cs create mode 100644 src/Runner.Worker/Handlers/Handler.cs create mode 100644 src/Runner.Worker/Handlers/HandlerFactory.cs create mode 100644 src/Runner.Worker/Handlers/NodeScriptActionHandler.cs create mode 100644 src/Runner.Worker/Handlers/OutputManager.cs create mode 100644 src/Runner.Worker/Handlers/RunnerPluginHandler.cs create mode 100644 src/Runner.Worker/Handlers/ScriptHandler.cs create mode 100644 src/Runner.Worker/Handlers/ScriptHandlerHelpers.cs create mode 100644 src/Runner.Worker/Handlers/StepHost.cs create mode 100644 src/Runner.Worker/IEnvironmentContextData.cs create mode 100644 src/Runner.Worker/IssueMatcher.cs create mode 100644 src/Runner.Worker/JobContext.cs create mode 100644 src/Runner.Worker/JobExtension.cs create mode 100644 src/Runner.Worker/JobExtensionRunner.cs create mode 100644 src/Runner.Worker/JobRunner.cs create mode 100644 src/Runner.Worker/PipelineDirectoryManager.cs create mode 100644 src/Runner.Worker/Program.cs create mode 100644 src/Runner.Worker/Runner.Worker.csproj create mode 100644 src/Runner.Worker/RunnerContext.cs create mode 100644 src/Runner.Worker/RunnerPluginManager.cs create mode 100644 src/Runner.Worker/StepsContext.cs create mode 100644 src/Runner.Worker/StepsRunner.cs create mode 100644 src/Runner.Worker/TempDirectoryManager.cs create mode 100644 src/Runner.Worker/TrackingConfig.cs create mode 100644 src/Runner.Worker/TrackingManager.cs create mode 100644 src/Runner.Worker/Variables.cs create mode 100644 src/Runner.Worker/Worker.cs create mode 100644 src/Runner.Worker/WorkerUtilties.cs create mode 100644 src/Runner.Worker/action_yaml.json create mode 100644 src/Sdk/AadAuthentication/CookieUtility.cs create mode 100644 src/Sdk/AadAuthentication/VssAadCredential.cs create mode 100644 src/Sdk/AadAuthentication/VssAadSettings.cs create mode 100644 src/Sdk/AadAuthentication/VssAadToken.cs create mode 100644 src/Sdk/AadAuthentication/VssAadTokenProvider.cs create mode 100644 src/Sdk/AadAuthentication/VssFederatedCredential.cs create mode 100644 src/Sdk/AadAuthentication/VssFederatedToken.cs create mode 100644 src/Sdk/AadAuthentication/VssFederatedTokenProvider.cs create mode 100644 src/Sdk/BuildWebApi/Api/AgentTargetExecutionType.cs create mode 100644 src/Sdk/BuildWebApi/Api/ArtifactResourceTypes.cs create mode 100644 src/Sdk/BuildWebApi/Api/BuildDefinitionExtensions.cs create mode 100644 src/Sdk/BuildWebApi/Api/BuildDefinitionHelpers.cs create mode 100644 
src/Sdk/BuildWebApi/Api/BuildHttpClient.cs create mode 100644 src/Sdk/BuildWebApi/Api/BuildHttpClientCompatBase.cs create mode 100644 src/Sdk/BuildWebApi/Api/BuildIssueKeys.cs create mode 100644 src/Sdk/BuildWebApi/Api/BuildOrchestrationType.cs create mode 100644 src/Sdk/BuildWebApi/Api/BuildPermissions.cs create mode 100644 src/Sdk/BuildWebApi/Api/BuildResourceIds.cs create mode 100644 src/Sdk/BuildWebApi/Api/BuildTemplateCategories.cs create mode 100644 src/Sdk/BuildWebApi/Api/BuildVariables.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/AgentPoolQueue.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/AgentPoolQueueTarget.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/AgentSpecification.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/AgentTargetExecutionOptions.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/AgentTargetExecutionOptionsJsonConverter.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/ArtifactResource.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Attachment.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Build.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildArtifact.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildBadge.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildDefinition.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionRevision.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionStep.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionTemplate.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionVariable.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildLog.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildLogReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildMetric.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildOption.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildOptionDefinition.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildOptionDefinitionReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildOptionGroupDefinition.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildOptionInputDefinition.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildProcess.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildProcessJsonConverter.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildProcessResources.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildReportMetadata.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildRepository.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildRequestValidationResult.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildResourceUsage.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildSettings.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildTrigger.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildTriggerJsonConverter.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/BuildWorkspace.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Change.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/DefinitionReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/DefinitionResourceReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Demand.cs create mode 100644 
src/Sdk/BuildWebApi/Api/Contracts/DemandEquals.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/DemandExists.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/DemandJsonConverter.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Dependency.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Deprecated/BuildDefinition3_2.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Deprecated/BuildDefinitionReference3_2.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Deprecated/BuildDefinitionTemplate3_2.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/DesignerProcess.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/DesignerProcessTarget.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/DockerProcess.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/DockerProcessTarget.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Folder.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Forks.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Issue.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/JustInTimeProcess.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/MultipleAgentExecutionOptions.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Phase.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/PhaseTarget.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/PhaseTargetJsonConverter.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/PropertyValue.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/PullRequest.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/RepositoryWebhook.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/RequestShallowReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/RetentionPolicy.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Schedule.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/ServerTarget.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/ServerTargetExecutionOptions.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/ServerTargetExecutionOptionsJsonConverter.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/ShallowReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/SourceProviderAttributes.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/SourceRelatedWorkItem.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/SourceRepository.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/SourceRepositoryItem.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/SvnWorkspace.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/TaskAgentPoolReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/TaskOrchestrationPlanGroupReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/TaskOrchestrationPlanReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/TaskReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/Timeline.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/TimelineAttempt.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/TimelineRecord.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/TimelineReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/VariableGroup.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/VariableGroupReference.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/VariableMultipliersAgentExecutionOptions.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/VariableMultipliersServerExecutionOptions.cs create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/WorkspaceTemplate.cs 
create mode 100644 src/Sdk/BuildWebApi/Api/Contracts/YamlProcess.cs create mode 100644 src/Sdk/BuildWebApi/Api/DefinitionMetrics.cs create mode 100644 src/Sdk/BuildWebApi/Api/DefinitionReferenceJsonConverter.cs create mode 100644 src/Sdk/BuildWebApi/Api/EndpointData.cs create mode 100644 src/Sdk/BuildWebApi/Api/Enumerations.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildArtifactAddedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildChangesCalculatedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildDefinitionChangedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildDefinitionChangingEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildDeletedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildDestroyedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildEvents.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildPollingSummaryEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildQueuedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildStartedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildTagsAddedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildUpdatedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/BuildsDeletedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/ConsoleLogEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/RealTimeBuildEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/SyncBuildCompletedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/SyncBuildStartedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/TaskOrchestrationPlanGroupsStartedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Events/TimelineRecordsUpdatedEvent.cs create mode 100644 src/Sdk/BuildWebApi/Api/Exceptions.cs create mode 100644 src/Sdk/BuildWebApi/Api/Generated/BuildHttpClientBase.cs create mode 100644 src/Sdk/BuildWebApi/Api/IVariableMultiplierExecutionOptions.cs create mode 100644 src/Sdk/BuildWebApi/Api/Links.cs create mode 100644 src/Sdk/BuildWebApi/Api/MetricAggregationTypes.cs create mode 100644 src/Sdk/BuildWebApi/Api/PhaseTargetType.cs create mode 100644 src/Sdk/BuildWebApi/Api/ProcessType.cs create mode 100644 src/Sdk/BuildWebApi/Api/ReportTypes.cs create mode 100644 src/Sdk/BuildWebApi/Api/RepositoryProperties.cs create mode 100644 src/Sdk/BuildWebApi/Api/RepositoryTypes.cs create mode 100644 src/Sdk/BuildWebApi/Api/Security.cs create mode 100644 src/Sdk/BuildWebApi/Api/SerializationHelper.cs create mode 100644 src/Sdk/BuildWebApi/Api/ServerTargetExecutionType.cs create mode 100644 src/Sdk/BuildWebApi/Api/SettingsSourceType.cs create mode 100644 src/Sdk/BuildWebApi/Api/TypePropertyJsonConverter.cs create mode 100644 src/Sdk/BuildWebApi/Api/WellKnownBuildOptions.cs create mode 100644 src/Sdk/BuildWebApi/Api/WellKnownDataProviderKeys.cs create mode 100644 src/Sdk/Common/Common/Authentication/FederatedCredential.cs create mode 100644 src/Sdk/Common/Common/Authentication/HttpRequestMessageWrapper.cs create mode 100644 src/Sdk/Common/Common/Authentication/HttpResponseMessageWrapper.cs create mode 100644 src/Sdk/Common/Common/Authentication/IHttpHeaders.cs create mode 100644 src/Sdk/Common/Common/Authentication/IHttpRequest.cs create mode 100644 src/Sdk/Common/Common/Authentication/IHttpResponse.cs create mode 100644 src/Sdk/Common/Common/Authentication/IVssCredentialPrompt.cs create mode 100644 
src/Sdk/Common/Common/Authentication/IVssCredentialStorage.cs create mode 100644 src/Sdk/Common/Common/Authentication/IssuedToken.cs create mode 100644 src/Sdk/Common/Common/Authentication/IssuedTokenCredential.cs create mode 100644 src/Sdk/Common/Common/Authentication/IssuedTokenProvider.cs create mode 100644 src/Sdk/Common/Common/Authentication/VssBasicCredential.cs create mode 100644 src/Sdk/Common/Common/Authentication/VssBasicToken.cs create mode 100644 src/Sdk/Common/Common/Authentication/VssBasicTokenProvider.cs create mode 100644 src/Sdk/Common/Common/Authentication/VssCredentials.cs create mode 100644 src/Sdk/Common/Common/Authentication/VssServiceIdentityCredential.cs create mode 100644 src/Sdk/Common/Common/Authentication/VssServiceIdentityToken.cs create mode 100644 src/Sdk/Common/Common/Authentication/VssServiceIdentityTokenProvider.cs create mode 100644 src/Sdk/Common/Common/Authentication/WindowsCredential.cs create mode 100644 src/Sdk/Common/Common/Authentication/WindowsToken.cs create mode 100644 src/Sdk/Common/Common/Authentication/WindowsTokenProvider.cs create mode 100644 src/Sdk/Common/Common/ClientStorage/IVssClientStorage.cs create mode 100644 src/Sdk/Common/Common/ClientStorage/VssFileStorage.cs create mode 100644 src/Sdk/Common/Common/Diagnostics/HttpRequestMessageExtensions.cs create mode 100644 src/Sdk/Common/Common/Diagnostics/VssHttpEventSource.cs create mode 100644 src/Sdk/Common/Common/Diagnostics/VssHttpMethod.cs create mode 100644 src/Sdk/Common/Common/Diagnostics/VssTraceActivity.cs create mode 100644 src/Sdk/Common/Common/ExceptionMappingAttribute.cs create mode 100644 src/Sdk/Common/Common/Exceptions/AuthenticationExceptions.cs create mode 100644 src/Sdk/Common/Common/Exceptions/CommonExceptions.cs create mode 100644 src/Sdk/Common/Common/Exceptions/PropertyExceptions.cs create mode 100644 src/Sdk/Common/Common/GenerateConstantAttributes.cs create mode 100644 src/Sdk/Common/Common/IVssClientCertificateManager.cs create mode 100644 src/Sdk/Common/Common/IVssHttpRetryInfo.cs create mode 100644 src/Sdk/Common/Common/Performance/PerformanceTimerConstants.cs create mode 100644 src/Sdk/Common/Common/Performance/PerformanceTimingGroup.cs create mode 100644 src/Sdk/Common/Common/TaskCancellationExtensions.cs create mode 100644 src/Sdk/Common/Common/Utility/ArgumentUtility.cs create mode 100644 src/Sdk/Common/Common/Utility/ArrayUtility.cs create mode 100644 src/Sdk/Common/Common/Utility/BackoffTimerHelper.cs create mode 100644 src/Sdk/Common/Common/Utility/CollectionsExtensions.cs create mode 100644 src/Sdk/Common/Common/Utility/ConvertUtility.cs create mode 100644 src/Sdk/Common/Common/Utility/DictionaryExtensions.cs create mode 100644 src/Sdk/Common/Common/Utility/EnumerableExtensions.cs create mode 100644 src/Sdk/Common/Common/Utility/ExpectedExceptionExtensions.cs create mode 100644 src/Sdk/Common/Common/Utility/HttpHeaders.cs create mode 100644 src/Sdk/Common/Common/Utility/LongPathUtility.cs create mode 100644 src/Sdk/Common/Common/Utility/PartitioningResults.cs create mode 100644 src/Sdk/Common/Common/Utility/PathUtility.cs create mode 100644 src/Sdk/Common/Common/Utility/PrimitiveExtensions.cs create mode 100644 src/Sdk/Common/Common/Utility/PropertyValidation.cs create mode 100644 src/Sdk/Common/Common/Utility/SecretUtility.cs create mode 100644 src/Sdk/Common/Common/Utility/SecureCompare.cs create mode 100644 src/Sdk/Common/Common/Utility/StreamParser.cs create mode 100644 src/Sdk/Common/Common/Utility/TypeExtensionMethods.cs create mode 100644 
src/Sdk/Common/Common/Utility/UriExtensions.cs create mode 100644 src/Sdk/Common/Common/Utility/UriUtility.cs create mode 100644 src/Sdk/Common/Common/Utility/VssStringComparer.cs create mode 100644 src/Sdk/Common/Common/Utility/XmlUtility.cs create mode 100644 src/Sdk/Common/Common/VssCommonConstants.cs create mode 100644 src/Sdk/Common/Common/VssException.cs create mode 100644 src/Sdk/Common/Common/VssHttpMessageHandler.cs create mode 100644 src/Sdk/Common/Common/VssHttpMessageHandlerTraceInfo.cs create mode 100644 src/Sdk/Common/Common/VssHttpRequestSettings.cs create mode 100644 src/Sdk/Common/Common/VssHttpRetryMessageHandler.cs create mode 100644 src/Sdk/Common/Common/VssHttpRetryOptions.cs create mode 100644 src/Sdk/Common/Common/VssNetworkHelper.cs create mode 100644 src/Sdk/Common/Common/VssPerformanceEventSource.cs create mode 100644 src/Sdk/Common/EmbeddedVersionInfo.cs create mode 100644 src/Sdk/CoreWebApi/Core/ProjectClasses.cs create mode 100644 src/Sdk/CoreWebApi/Core/TeamProjectReference.cs create mode 100644 src/Sdk/DTContracts/Contracts/AuthorizationHeader.cs create mode 100644 src/Sdk/DTContracts/Contracts/DataSourceBinding.cs create mode 100644 src/Sdk/DTContracts/Contracts/ProcessParameters.cs create mode 100644 src/Sdk/DTContracts/Contracts/TaskInputDefinition.cs create mode 100644 src/Sdk/DTContracts/Contracts/TaskInputType.cs create mode 100644 src/Sdk/DTContracts/Contracts/TaskInputValidation.cs create mode 100644 src/Sdk/DTContracts/Contracts/TaskSourceDefinition.cs create mode 100644 src/Sdk/DTExpressions/Expressions/AndNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/CoalesceNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/CollectionAccessors/JArrayAccessor.cs create mode 100644 src/Sdk/DTExpressions/Expressions/CollectionAccessors/JObjectAccessor.cs create mode 100644 src/Sdk/DTExpressions/Expressions/CollectionAccessors/JsonDictionaryContractAccessor.cs create mode 100644 src/Sdk/DTExpressions/Expressions/CollectionAccessors/JsonObjectContractAccessor.cs create mode 100644 src/Sdk/DTExpressions/Expressions/CollectionAccessors/ListOfObjectAccessor.cs create mode 100644 src/Sdk/DTExpressions/Expressions/CollectionAccessors/ReadOnlyDictionaryOfStringObjectAccessor.cs create mode 100644 src/Sdk/DTExpressions/Expressions/CollectionAccessors/ReadOnlyDictionaryOfStringStringAccessor.cs create mode 100644 src/Sdk/DTExpressions/Expressions/CollectionAccessors/ReadOnlyListOfObjectAccessor.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ContainerNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ContainsNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ContainsValueNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ConversionResult.cs create mode 100644 src/Sdk/DTExpressions/Expressions/EndsWithNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/EqualNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/EvaluationContext.cs create mode 100644 src/Sdk/DTExpressions/Expressions/EvaluationMemory.cs create mode 100644 src/Sdk/DTExpressions/Expressions/EvaluationOptions.cs create mode 100644 src/Sdk/DTExpressions/Expressions/EvaluationResult.cs create mode 100644 src/Sdk/DTExpressions/Expressions/EvaluationTraceWriter.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ExpressionConstants.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ExpressionException.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ExpressionNode.cs create mode 100644 
src/Sdk/DTExpressions/Expressions/ExpressionParser.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ExpressionParserOptions.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ExpressionUtil.cs create mode 100644 src/Sdk/DTExpressions/Expressions/FormatNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/FunctionInfo.cs create mode 100644 src/Sdk/DTExpressions/Expressions/FunctionNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/GreaterThanNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/GreaterThanOrEqualNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/IBoolean.cs create mode 100644 src/Sdk/DTExpressions/Expressions/IExpressionNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/IFunctionInfo.cs create mode 100644 src/Sdk/DTExpressions/Expressions/INamedValueInfo.cs create mode 100644 src/Sdk/DTExpressions/Expressions/INumber.cs create mode 100644 src/Sdk/DTExpressions/Expressions/IReadOnlyArray.cs create mode 100644 src/Sdk/DTExpressions/Expressions/IReadOnlyObject.cs create mode 100644 src/Sdk/DTExpressions/Expressions/IString.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ITraceWriter.cs create mode 100644 src/Sdk/DTExpressions/Expressions/InNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/IndexerNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/JoinNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/LessThanNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/LessThanOrEqualNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/LexicalAnalyzer.cs create mode 100644 src/Sdk/DTExpressions/Expressions/LiteralValueNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/MemoryCounter.cs create mode 100644 src/Sdk/DTExpressions/Expressions/NamedValueInfo.cs create mode 100644 src/Sdk/DTExpressions/Expressions/NamedValueNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/NotEqualNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/NotInNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/NotNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/OrNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ParseException.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ParseExceptionKind.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ResultMemory.cs create mode 100644 src/Sdk/DTExpressions/Expressions/StartsWithNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/Token.cs create mode 100644 src/Sdk/DTExpressions/Expressions/TokenKind.cs create mode 100644 src/Sdk/DTExpressions/Expressions/TypeCastException.cs create mode 100644 src/Sdk/DTExpressions/Expressions/UnknownFunctionNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/UnknownNamedValueNode.cs create mode 100644 src/Sdk/DTExpressions/Expressions/ValueKind.cs create mode 100644 src/Sdk/DTExpressions/Expressions/XOrNode.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/EvaluationOptions.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/EvaluationResult.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/ExpressionConstants.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/ExpressionException.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/ExpressionParser.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/FunctionInfo.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/IExpressionNode.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/IExpressionNodeExtensions.cs create mode 100644 
src/Sdk/DTExpressions2/Expressions2/IFunctionInfo.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/INamedValueInfo.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/ITraceWriter.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/NamedValueInfo.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/ParseException.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/ParseExceptionKind.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Container.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/EvaluationContext.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/EvaluationMemory.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/EvaluationTraceWriter.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/ExpressionNode.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/ExpressionUtility.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Function.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/Contains.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/EndsWith.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/Format.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/HashFiles.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/Join.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/NoOperation.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/StartsWith.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/ToJson.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/IBoolean.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/INull.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/INumber.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/IReadOnlyArray.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/IReadOnlyObject.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/IString.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Literal.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/MemoryCounter.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/NamedValue.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/NoOperationNamedValue.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/And.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Equal.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/GreaterThan.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/GreaterThanOrEqual.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Index.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/LessThan.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/LessThanOrEqual.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Not.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/NotEqual.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Or.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/ResultMemory.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Sdk/Wildcard.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Tokens/Associativity.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Tokens/LexicalAnalyzer.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/Tokens/Token.cs create mode 100644 
src/Sdk/DTExpressions2/Expressions2/Tokens/TokenKind.cs create mode 100644 src/Sdk/DTExpressions2/Expressions2/ValueKind.cs create mode 100644 src/Sdk/DTGenerated/Generated/TaskAgentHttpClientBase.cs create mode 100644 src/Sdk/DTGenerated/Generated/TaskHttpClientBase.cs create mode 100644 src/Sdk/DTLogging/Logging/ISecret.cs create mode 100644 src/Sdk/DTLogging/Logging/ISecretMasker.cs create mode 100644 src/Sdk/DTLogging/Logging/RegexSecret.cs create mode 100644 src/Sdk/DTLogging/Logging/ReplacementPosition.cs create mode 100644 src/Sdk/DTLogging/Logging/SecretMasker.cs create mode 100644 src/Sdk/DTLogging/Logging/ValueEncoders.cs create mode 100644 src/Sdk/DTLogging/Logging/ValueSecret.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/ContextValueNode.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/EmptyTraceWriter.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/ExpressionTraceWriter.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/IObjectReader.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/IObjectWriter.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/ITraceWriter.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/ITraceWriterExtensions.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/BooleanDefinition.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/Definition.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/DefinitionType.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/MappingDefinition.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/NullDefinition.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/NumberDefinition.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/OneOfDefinition.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/PropertyValue.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/ScalarDefinition.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/SequenceDefinition.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/StringDefinition.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/TemplateSchema.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateConstants.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateContext.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateEvaluator.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateException.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateMemory.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateReader.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateUnraveler.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateValidationError.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateValidationErrors.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateWriter.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/BasicExpressionToken.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/BooleanToken.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/ExpressionToken.cs create mode 100644 
src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/InsertExpressionToken.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/LiteralToken.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/MappingToken.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/NullToken.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/NumberToken.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/ScalarToken.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/SequenceToken.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/StringToken.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TemplateToken.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TemplateTokenExtensions.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TemplateTokenJsonConverter.cs create mode 100644 src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TokenType.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ActionStep.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ActionStepDefinitionReference.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ActionStepDefinitionReferenceConverter.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessage.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessageUtil.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/AgentPoolReference.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/AgentPoolStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/AgentPoolTarget.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/AgentQueueReference.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/AgentQueueStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/AgentQueueTarget.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Artifacts/ArtifactConstants.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Artifacts/DownloadStepExtensions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Artifacts/IArtifactResolver.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Artifacts/PipelineArtifactConstants.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Artifacts/YamlArtifactConstants.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/BuildOptions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/BuildResource.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Checkpoints/CheckpointContext.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Checkpoints/CheckpointDecision.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Checkpoints/CheckpointScope.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Checkpoints/ResourceInfo.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ConditionResult.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContainerResource.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextData/ArrayContextData.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextData/BooleanContextData.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextData/CaseSensitiveDictionaryContextData.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextData/DictionaryContextData.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextData/JTokenExtensions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextData/NumberContextData.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextData.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextDataExtensions.cs create mode 
100644 src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextDataJsonConverter.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextDataType.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextData/StringContextData.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextData/TemplateMemoryExtensions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextData/TemplateTokenExtensions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContextScope.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ContinuousIntegrationTrigger.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/CounterStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/CreateJobResult.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/DeploymentExecutionOptions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/DeploymentGroupTarget.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Environment/EnvironmentReference.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/EnvironmentDeploymentTarget.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/EnvironmentStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ExecutionOptions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ExpandPhaseResult.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ExpressionResult.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ExpressionValue.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/CounterNode.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/ExpressionConstants.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/InputValidationConstants.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/InputValueNode.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/IsEmailNode.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/IsIPv4AddressNode.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/IsInRangeNode.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/IsMatchNode.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/IsSHA1Node.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/IsUrlNode.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/LengthNode.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/PipelineContextNode.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/RegexUtility.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/VariablesContextNode.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Expressions/WellKnownRegularExpressions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/GraphCondition.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/GroupStep.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IAgentPoolResolver.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IAgentPoolStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IAgentQueueResolver.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IAgentQueueStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ICounterResolver.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ICounterStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IEnvironmentResolver.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IEnvironmentStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IGraphNode.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IJobFactory.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IPackageStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IPhaseProvider.cs create mode 100644 
src/Sdk/DTPipelines/Pipelines/IPipelineContext.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IPipelineContextExtensions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IPipelineIdGenerator.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IResourceStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IResourceStoreExtensions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ISecureFileResolver.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ISecureFileStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IServiceEndpointResolver.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IServiceEndpointStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IStepProvider.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ITaskResolver.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ITaskStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ITaskTemplateResolver.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ITaskTemplateStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IVariable.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IVariableGroupResolver.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IVariableGroupStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/IVariableValueProvider.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Job.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/JobContainer.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/JobExpansionOptions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/JobFactory.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/JobResources.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/JobStep.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/GraphConditionNamedValue.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/IFileProvider.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/JobDisplayNameBuilder.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/JsonObjectReader.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/MatrixBuilder.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ParseOptions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ParseResult.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConstants.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConverter.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateEvaluator.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateParser.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateSchemaFactory.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ReferenceNameBuilder.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/TaskResultExtensions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/TemplateReference.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/YamlObjectReader.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/YamlObjectWriter.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ObjectTemplating/YamlTemplateLoader.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PackageStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ParallelExecutionOptions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Phase.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PhaseCondition.cs create mode 100644 
src/Sdk/DTPipelines/Pipelines/PhaseDependency.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PhaseNode.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PhaseTarget.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PhaseTargetType.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineBuildContext.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineBuildResult.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineBuilder.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineConstants.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineContextBase.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineContextBuilder.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineEnvironment.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineException.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineIdGenerator.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineProcess.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineResource.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineResources.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineState.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineStepsTemplate.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineTemplate.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineTrigger.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineTriggerType.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineUtilities.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineValidationError.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PipelineValidationErrors.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ProviderPhase.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/PullRequestTrigger.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/RepositoryResource.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/RepositoryTypes.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Resource.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ResourceComparer.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ResourceProperties.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ResourceReference.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ResourceStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/AgentJobStartedData.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/GraphExecutionContext.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/GraphNodeInstance.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/JobAttempt.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/JobExecutionContext.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/JobInstance.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/JobStartedEventData.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/PhaseAttempt.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/PhaseExecutionContext.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/PhaseInstance.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/PipelineAttemptBuilder.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/PipelineExecutionContext.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/StageAttempt.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/StageExecutionContext.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Runtime/StageInstance.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/SecretStoreConfiguration.cs create mode 100644 
src/Sdk/DTPipelines/Pipelines/SecureFileReference.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/SecureFileStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ServerTarget.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ServiceEndpointReference.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/ServiceEndpointStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Stage.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/StageCondition.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Step.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/StepConverter.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/StrategyResult.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/TaskCondition.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/TaskDefinitionExtensions.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/TaskStep.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/TaskStepDefinitionReference.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/TaskStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/TaskTemplateReference.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/TaskTemplateStep.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/TaskTemplateStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/TimelineRecordIdGenerator.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Validation/GraphValidator.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Validation/IInputValidator.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Validation/InputValidationContext.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Validation/InputValidationResult.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Validation/InputValidator.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Validation/NameValidation.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Validation/ScriptTaskValidator.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Validation/ValidationResult.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/Variable.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/VariableGroupReference.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/VariableGroupStore.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/VariablesDictionary.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/WorkspaceMapping.cs create mode 100644 src/Sdk/DTPipelines/Pipelines/WorkspaceOptions.cs create mode 100644 src/Sdk/DTPipelines/workflow-v1.0.json create mode 100644 src/Sdk/DTWebApi/WebApi/AgentJobRequestMessage.cs create mode 100644 src/Sdk/DTWebApi/WebApi/AgentRefreshMessage.cs create mode 100644 src/Sdk/DTWebApi/WebApi/AuditAction.cs create mode 100644 src/Sdk/DTWebApi/WebApi/AzureKeyVaultVariableGroupProviderData.cs create mode 100644 src/Sdk/DTWebApi/WebApi/AzureKeyVaultVariableValue.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Demand.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DemandEquals.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DemandExists.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DemandJsonConverter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DemandMinimumVersion.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentGroup.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentGroupActionFilter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentGroupCreateParameter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentGroupExpands.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentGroupMetrics.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentGroupReference.cs create mode 100644 
src/Sdk/DTWebApi/WebApi/DeploymentGroupUpdateParameter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentMachine.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentMachineExpands.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentMachineGroup.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentMachineGroupReference.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentPoolSummary.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentPoolSummaryExpands.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentTargetExpands.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DeploymentTargetUpdateParameter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/DiagnosticLogMetadata.cs create mode 100644 src/Sdk/DTWebApi/WebApi/EnableAccessTokenType.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/EnvironmentCreateParameter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/EnvironmentDeploymentExecutionRecord.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/EnvironmentExpands.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/EnvironmentInstance.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/EnvironmentLinkedResourceReference.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/EnvironmentReference.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/EnvironmentResource.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/EnvironmentResourceReference.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/EnvironmentResourceType.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/EnvironmentUpdateParameter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/KubernetesResource.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/KubernetesResourceCreateParameters.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/VirtualMachine.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/VirtualMachineGroup.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Environment/VirtualMachineGroupCreateParameters.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Exceptions.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ExpressionValidationItem.cs create mode 100644 src/Sdk/DTWebApi/WebApi/IOrchestrationProcess.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ITaskDefinitionReference.cs create mode 100644 src/Sdk/DTWebApi/WebApi/IdentityRefExtensions.cs create mode 100644 src/Sdk/DTWebApi/WebApi/InputBindingContext.cs create mode 100644 src/Sdk/DTWebApi/WebApi/InputValidationItem.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Issue.cs create mode 100644 src/Sdk/DTWebApi/WebApi/IssueType.cs create mode 100644 src/Sdk/DTWebApi/WebApi/JobCancelMessage.cs create mode 100644 src/Sdk/DTWebApi/WebApi/JobEnvironment.cs create mode 100644 src/Sdk/DTWebApi/WebApi/JobEvent.cs create mode 100644 src/Sdk/DTWebApi/WebApi/JobExecutionModeTypes.cs create mode 100644 src/Sdk/DTWebApi/WebApi/JobOption.cs create mode 100644 src/Sdk/DTWebApi/WebApi/JobRequestMessage.cs create mode 100644 src/Sdk/DTWebApi/WebApi/JobRequestMessageJsonConverter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/JobRequestMessageTypes.cs create mode 100644 src/Sdk/DTWebApi/WebApi/MachineGroupActionFilter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/MarketplacePurchasedLicense.cs create mode 100644 src/Sdk/DTWebApi/WebApi/MaskHint.cs create mode 100644 src/Sdk/DTWebApi/WebApi/MaskType.cs create mode 100644 src/Sdk/DTWebApi/WebApi/MetricsColumn.cs create mode 100644 src/Sdk/DTWebApi/WebApi/MetricsRow.cs create mode 100644 src/Sdk/DTWebApi/WebApi/PackageMetadata.cs create mode 
100644 src/Sdk/DTWebApi/WebApi/PackageVersion.cs create mode 100644 src/Sdk/DTWebApi/WebApi/PlanEnvironment.cs create mode 100644 src/Sdk/DTWebApi/WebApi/PlanGroupStatus.cs create mode 100644 src/Sdk/DTWebApi/WebApi/PlanTemplateType.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ProjectReference.cs create mode 100644 src/Sdk/DTWebApi/WebApi/PublishTaskGroupMetadata.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ResourceLimit.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ResourceUsage.cs create mode 100644 src/Sdk/DTWebApi/WebApi/SecureFile.cs create mode 100644 src/Sdk/DTWebApi/WebApi/SecureFileActionFilter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServerTaskRequestMessage.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServerTaskSectionExecutionOutput.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AadLoginPromptOption.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AadOauthTokenRequest.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AadOauthTokenResult.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureKeyVaultPermission.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureManagementGroup.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureManagementGroupQueryResult.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzurePermission.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzurePermissionResourceProviders.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureResourcePermission.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureRoleAssignmentPermission.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureSubscription.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureSubscriptionQueryResult.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/DataSourceBinding.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/EndpointAuthorization.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/SerializationHelper.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/ServiceEndpoint.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/ServiceEndpointTypes.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgent.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentAuthorization.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentCloud.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentCloudRequest.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentCloudType.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentDelaySource.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentHttpClient.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentHttpClientCompatBase.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentJob.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentJobRequest.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentJobResultFilter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentJobStep.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentJobTask.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentJobVariable.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentMessage.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPool.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPoolActionFilter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceDefinition.cs create mode 100644 
src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJob.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJobResult.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJobStatus.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJobTargetAgent.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceOptions.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceRetentionPolicy.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceSchedule.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceScheduleDays.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPoolReference.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPoolType.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentProvisiongStateConstants.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentPublicKey.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentQueue.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentQueueActionFilter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentReference.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentSession.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentSessionKey.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentStatus.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentStatusFilter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentUpdate.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAgentUpdateReason.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskAttachment.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskDefinition.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskDefinitionEndpoint.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskDefinitionReference.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskDefinitionType.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskExecution.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskGroup.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskGroupCreateParameter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskGroupDefinition.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskGroupExpands.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskGroupQueryOrder.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskGroupRevision.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskGroupStep.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskGroupUpdateParameter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskHttpClient.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskHubLicenseDetails.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskInputDefinition.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskInputType.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskInstance.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskLog.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskLogReference.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskOrchestrationContainer.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskOrchestrationItem.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskOrchestrationItemJsonConverter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskOrchestrationItemType.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskOrchestrationJob.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskOrchestrationOwner.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlan.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlanGroupsQueueMetrics.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlanReference.cs create mode 100644 
src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlanState.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskOrchestrationQueuedPlan.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskOrchestrationQueuedPlanGroup.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskOutputVariable.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskReference.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskResourceIds.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskResult.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskRunsOnConstants.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskSourceDefinition.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskVersion.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TaskVersionSpec.cs create mode 100644 src/Sdk/DTWebApi/WebApi/Timeline.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TimelineAttempt.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TimelineRecord.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TimelineRecordFeedLinesWrapper.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TimelineRecordState.cs create mode 100644 src/Sdk/DTWebApi/WebApi/TimelineReference.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ValidationItem.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ValidationItemJsonConverter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ValidationRequest.cs create mode 100644 src/Sdk/DTWebApi/WebApi/ValidationTypes.cs create mode 100644 src/Sdk/DTWebApi/WebApi/VariableGroup.cs create mode 100644 src/Sdk/DTWebApi/WebApi/VariableGroupActionFilter.cs create mode 100644 src/Sdk/DTWebApi/WebApi/VariableGroupParameters.cs create mode 100644 src/Sdk/DTWebApi/WebApi/VariableGroupProviderData.cs create mode 100644 src/Sdk/DTWebApi/WebApi/VariableGroupQueryOrder.cs create mode 100644 src/Sdk/DTWebApi/WebApi/VariableGroupType.cs create mode 100644 src/Sdk/DTWebApi/WebApi/VariableGroupUtility.cs create mode 100644 src/Sdk/DTWebApi/WebApi/VariableUtility.cs create mode 100644 src/Sdk/DTWebApi/WebApi/VariableValue.cs create mode 100644 src/Sdk/DTWebApi/WebApi/VersionParser.cs create mode 100644 src/Sdk/DTWebApi/WebApi/WellKnownDistributedTaskVariables.cs create mode 100644 src/Sdk/DTWebApi/WebApi/WellKnownPackageTypes.cs create mode 100644 src/Sdk/DTWebApi/WebApi/WellKnownServiceEndpointNames.cs create mode 100644 src/Sdk/InternalsVisibleTo.cs create mode 100644 src/Sdk/Namespaces.cs create mode 100644 src/Sdk/Resources/CommonResources.g.cs create mode 100644 src/Sdk/Resources/ContentResources.g.cs create mode 100644 src/Sdk/Resources/ExpressionResources.g.cs create mode 100644 src/Sdk/Resources/FileContainerResources.g.cs create mode 100644 src/Sdk/Resources/GraphResources.g.cs create mode 100644 src/Sdk/Resources/IdentityResources.g.cs create mode 100644 src/Sdk/Resources/JwtResources.g.cs create mode 100644 src/Sdk/Resources/LocationResources.g.cs create mode 100644 src/Sdk/Resources/PatchResources.g.cs create mode 100644 src/Sdk/Resources/PipelineStrings.g.cs create mode 100644 src/Sdk/Resources/SecurityResources.g.cs create mode 100644 src/Sdk/Resources/TemplateStrings.g.cs create mode 100644 src/Sdk/Resources/WebApiResources.g.cs create mode 100644 src/Sdk/Sdk.csproj create mode 100644 src/Sdk/WebApi/WebApi/Attributes.cs create mode 100644 src/Sdk/WebApi/WebApi/Constants.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Common/SocialDescriptor.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Common/SubjectDescriptor.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/AccessTokenResult.cs create mode 100644 
src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/AuthorizationGrant.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/AuthorizationGrantJsonConverter.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/GrantType.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/JwtBearerAuthorizationGrant.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/RefreshTokenGrant.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/TokenError.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/FileContainer/Enumerations.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/FileContainer/FileContainer.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/FileContainer/FileContainerItem.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/FormInput/InputDataType.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/FormInput/InputDescriptor.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/FormInput/InputMode.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/FormInput/InputValidation.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/FormInput/InputValues.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphGroup.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphGroupCreationContext.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphGroupCreationContextJsonConverter.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphMember.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphScope.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSubject.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSubjectBase.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSubjectJsonConverter.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSystemSubject.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUser.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserCreationContext.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserCreationContextJsonConverter.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserUpdateContext.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserUpdateContextJsonConverter.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Graph/Constants.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/ChangedIdentities.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/CreateGroupsInfo.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/CreateScopeInfo.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/FrameworkIdentityInfo.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/FrameworkIdentityType.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/GroupMembership.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/IReadOnlyVssIdentity.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/IVssIdentity.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/Identity.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityBatchInfo.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityCollections.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityDescriptor.cs create mode 100644 
src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityEnumerations.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityMetaType.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityScope.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/IdentitySelf.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/IdentitySnapshot.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityUpdateData.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/RequestHeadersContext.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/SequenceContext.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/SwapIdentityInfo.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Identity/TenantInfo.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Licensing/AccountLicense.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Licensing/Definitions.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Licensing/License.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Licensing/LicenseComparer.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Licensing/LicenseJsonConverter.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Licensing/LicenseTypeConverter.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Licensing/MsdnLicense.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Location/AccessMapping.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Location/ConnectionData.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Location/Constants.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Location/LocationMapping.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Location/ResourceAreaInfo.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Location/ServiceDefinition.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/PagedList.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/AddPatchOperation.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/Exceptions.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchDocument.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchOperation.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchOperationApplied.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchOperationApplying.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/Json/JsonPatchDocument.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/Json/JsonPatchOperation.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/ObjectDictionaryConverter.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/Operation.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/PatchOperation.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/PatchOperationAppliedEventArgs.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/PatchOperationApplyingEventArgs.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/RemovePatchOperation.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/ReplacePatchOperation.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Patch/TestPatchOperation.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Profile/AttributeDescriptor.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Profile/AttributesContainer.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Profile/AttributesQueryContext.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Profile/Avatar.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Profile/CoreProfileAttribute.cs 
create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Profile/ITimeStamped.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Profile/IVersioned.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Profile/Profile.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Profile/ProfileAttribute.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Profile/ProfileAttributeBase.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Profile/ProfileQueryContext.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/PropertiesCollection.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/ReferenceLink/ReferenceLink.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/ReferenceLink/ReferenceLinks.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Users/UpdateUserParameters.cs create mode 100644 src/Sdk/WebApi/WebApi/Contracts/Users/User.cs create mode 100644 src/Sdk/WebApi/WebApi/Exceptions/CommonRestExceptions.cs create mode 100644 src/Sdk/WebApi/WebApi/Exceptions/FileContainerExceptions.cs create mode 100644 src/Sdk/WebApi/WebApi/Exceptions/GraphExceptions.cs create mode 100644 src/Sdk/WebApi/WebApi/Exceptions/IdentityExceptions.cs create mode 100644 src/Sdk/WebApi/WebApi/Exceptions/LocationExceptions.cs create mode 100644 src/Sdk/WebApi/WebApi/Exceptions/SecurityExceptions.cs create mode 100644 src/Sdk/WebApi/WebApi/Exceptions/VssApiResourceExceptions.cs create mode 100644 src/Sdk/WebApi/WebApi/HttpClients/Constants.cs create mode 100644 src/Sdk/WebApi/WebApi/HttpClients/FileContainerHttpClient.cs create mode 100644 src/Sdk/WebApi/WebApi/HttpClients/IdentityHttpClient.cs create mode 100644 src/Sdk/WebApi/WebApi/HttpClients/LocationHttpClient.cs create mode 100644 src/Sdk/WebApi/WebApi/IdentityRef.cs create mode 100644 src/Sdk/WebApi/WebApi/JsonUtility.cs create mode 100644 src/Sdk/WebApi/WebApi/Jwt/IJsonWebTokenHeaderProvider.cs create mode 100644 src/Sdk/WebApi/WebApi/Jwt/JsonWebToken.cs create mode 100644 src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenConstants.cs create mode 100644 src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenExceptions.cs create mode 100644 src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenUtilities.cs create mode 100644 src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenValidationParameters.cs create mode 100644 src/Sdk/WebApi/WebApi/Jwt/UnixEpochDateTimeConverter.cs create mode 100644 src/Sdk/WebApi/WebApi/Location/Interfaces.cs create mode 100644 src/Sdk/WebApi/WebApi/Location/LocationCacheManager.cs create mode 100644 src/Sdk/WebApi/WebApi/Location/LocationServerMapCache.cs create mode 100644 src/Sdk/WebApi/WebApi/Location/LocationService.cs create mode 100644 src/Sdk/WebApi/WebApi/Location/LocationXmlOperator.cs create mode 100644 src/Sdk/WebApi/WebApi/Location/ServerDataProvider.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/IVssOAuthTokenParameterProvider.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthAccessToken.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthAccessTokenCredential.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthClientCredential.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthClientCredentialType.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthClientCredentialsGrant.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthConstants.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthCredential.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthExceptions.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthGrant.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthGrantType.cs create mode 100644 
src/Sdk/WebApi/WebApi/OAuth/VssOAuthJwtBearerAssertion.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthJwtBearerClientCredential.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenHttpClient.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenParameters.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenProvider.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenRequest.cs create mode 100644 src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenResponse.cs create mode 100644 src/Sdk/WebApi/WebApi/Profile/ProfileArgumentValidation.cs create mode 100644 src/Sdk/WebApi/WebApi/ProxyAuthenticationRequiredException.cs create mode 100644 src/Sdk/WebApi/WebApi/PublicAccessJsonConverter.cs create mode 100644 src/Sdk/WebApi/WebApi/ResourceLocationIds.cs create mode 100644 src/Sdk/WebApi/WebApi/ServiceEvent.cs create mode 100644 src/Sdk/WebApi/WebApi/TaskExtensions.cs create mode 100644 src/Sdk/WebApi/WebApi/Utilities/AsyncLock.cs create mode 100644 src/Sdk/WebApi/WebApi/Utilities/BaseSecuredObject.cs create mode 100644 src/Sdk/WebApi/WebApi/Utilities/ClientGeneratorAttributes.cs create mode 100644 src/Sdk/WebApi/WebApi/Utilities/ISecuredObject.cs create mode 100644 src/Sdk/WebApi/WebApi/Utilities/UserAgentUtility.cs create mode 100644 src/Sdk/WebApi/WebApi/Utilities/XmlSerializableDataContractExtensions.cs create mode 100644 src/Sdk/WebApi/WebApi/VssApiResourceLocation.cs create mode 100644 src/Sdk/WebApi/WebApi/VssApiResourceLocationCollection.cs create mode 100644 src/Sdk/WebApi/WebApi/VssApiResourceVersion.cs create mode 100644 src/Sdk/WebApi/WebApi/VssApiResourceVersionExtensions.cs create mode 100644 src/Sdk/WebApi/WebApi/VssCamelCasePropertyNamesContractResolver.cs create mode 100644 src/Sdk/WebApi/WebApi/VssClientHttpRequestSettings.cs create mode 100644 src/Sdk/WebApi/WebApi/VssClientSettings.cs create mode 100644 src/Sdk/WebApi/WebApi/VssConnectMode.cs create mode 100644 src/Sdk/WebApi/WebApi/VssConnection.cs create mode 100644 src/Sdk/WebApi/WebApi/VssConnectionParameterKeys.cs create mode 100644 src/Sdk/WebApi/WebApi/VssEventId.cs create mode 100644 src/Sdk/WebApi/WebApi/VssHttpClientBase.cs create mode 100644 src/Sdk/WebApi/WebApi/VssHttpUriUtility.cs create mode 100644 src/Sdk/WebApi/WebApi/VssJsonCollectionWrapper.cs create mode 100644 src/Sdk/WebApi/WebApi/VssJsonCreationConverter.cs create mode 100644 src/Sdk/WebApi/WebApi/VssJsonMediaTypeFormatter.cs create mode 100644 src/Sdk/WebApi/WebApi/VssRequestTimerTrace.cs create mode 100644 src/Sdk/WebApi/WebApi/VssResponseContext.cs create mode 100644 src/Sdk/WebApi/WebApi/VssSecureJsonConverter.cs create mode 100644 src/Sdk/WebApi/WebApi/VssServiceResponseException.cs create mode 100644 src/Sdk/WebApi/WebApi/VssSigningCredentials.cs create mode 100644 src/Sdk/WebApi/WebApi/WrappedException.cs create mode 100644 src/Sdk/nuget.config create mode 100644 src/Sync-Sdk.ps1 create mode 100644 src/Test/L0/CommandLineParserL0.cs create mode 100644 src/Test/L0/ConstantGenerationL0.cs create mode 100644 src/Test/L0/Container/ContainerInfoL0.cs create mode 100644 src/Test/L0/Container/DockerUtilL0.cs create mode 100644 src/Test/L0/DotnetsdkDownloadScriptL0.cs create mode 100644 src/Test/L0/ExtensionManagerL0.cs create mode 100644 src/Test/L0/HostContextL0.cs create mode 100644 src/Test/L0/Listener/AgentL0.cs create mode 100644 src/Test/L0/Listener/CommandSettingsL0.cs create mode 100644 src/Test/L0/Listener/Configuration/AgentCapabilitiesProviderTestL0.cs create mode 100644 
src/Test/L0/Listener/Configuration/AgentCredentialL0.cs create mode 100644 src/Test/L0/Listener/Configuration/ArgumentValidatorTestsL0.cs create mode 100644 src/Test/L0/Listener/Configuration/ConfigurationManagerL0.cs create mode 100644 src/Test/L0/Listener/Configuration/NativeWindowsServiceHelperL0.cs create mode 100644 src/Test/L0/Listener/Configuration/PromptManagerTestsL0.cs create mode 100644 src/Test/L0/Listener/JobDispatcherL0.cs create mode 100644 src/Test/L0/Listener/MessageListenerL0.cs create mode 100644 src/Test/L0/PagingLoggerL0.cs create mode 100644 src/Test/L0/ProcessExtensionL0.cs create mode 100644 src/Test/L0/ProcessInvokerL0.cs create mode 100644 src/Test/L0/ProxyConfigL0.cs create mode 100644 src/Test/L0/ServiceInterfacesL0.cs create mode 100644 src/Test/L0/TestHostContext.cs create mode 100644 src/Test/L0/TestUtil.cs create mode 100644 src/Test/L0/Util/ArgUtilL0.cs create mode 100644 src/Test/L0/Util/IOUtilL0.cs create mode 100644 src/Test/L0/Util/StringUtilL0.cs create mode 100644 src/Test/L0/Util/TaskResultUtilL0.cs create mode 100644 src/Test/L0/Util/UrlUtilL0.cs create mode 100644 src/Test/L0/Util/VssUtilL0.cs create mode 100644 src/Test/L0/Util/WhichUtilL0.cs create mode 100644 src/Test/L0/Worker/ActionCommandL0.cs create mode 100644 src/Test/L0/Worker/ActionCommandManagerL0.cs create mode 100644 src/Test/L0/Worker/ActionManagerL0.cs create mode 100644 src/Test/L0/Worker/ActionManifestManagerL0.cs create mode 100644 src/Test/L0/Worker/ActionRunnerL0.cs create mode 100644 src/Test/L0/Worker/ExecutionContextL0.cs create mode 100644 src/Test/L0/Worker/ExpressionManagerL0.cs create mode 100644 src/Test/L0/Worker/IssueMatcherL0.cs create mode 100644 src/Test/L0/Worker/JobExtensionL0.cs create mode 100644 src/Test/L0/Worker/JobRunnerL0.cs create mode 100644 src/Test/L0/Worker/OutputManagerL0.cs create mode 100644 src/Test/L0/Worker/PipelineDirectoryManagerL0.cs create mode 100644 src/Test/L0/Worker/StepsRunnerL0.cs create mode 100644 src/Test/L0/Worker/TaskCommandExtensionL0.cs create mode 100644 src/Test/L0/Worker/TrackingManagerL0.cs create mode 100644 src/Test/L0/Worker/VariablesL0.cs create mode 100644 src/Test/L0/Worker/WorkerL0.cs create mode 100644 src/Test/Properties.cs create mode 100644 src/Test/Test.csproj create mode 100644 src/Test/TestData/dockerfileaction.yml create mode 100644 src/Test/TestData/dockerfileaction_arg_env_expression.yml create mode 100644 src/Test/TestData/dockerfileaction_cleanup.yml create mode 100644 src/Test/TestData/dockerfileaction_noargs_noenv_noentrypoint.yml create mode 100644 src/Test/TestData/dockerfilerelativeaction.yml create mode 100644 src/Test/TestData/dockerhubaction.yml create mode 100644 src/Test/TestData/nodeaction.yml create mode 100644 src/Test/TestData/nodeaction_cleanup.yml create mode 100644 src/Test/TestData/noderelativeaction.yml create mode 100644 src/Test/TestData/pluginaction.yml create mode 100644 src/Trim-Sdk.ps1 create mode 100644 src/dev.cmd create mode 100755 src/dev.sh create mode 100644 src/dir.proj create mode 100644 src/global.json create mode 100644 src/runnerversion create mode 100644 windows.template.yml diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000000..b1fc51b0236 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,87 @@ +############################################################################### +# Set default behavior to automatically normalize line endings. 
+############################################################################### +* text=auto + +# Shell scripts should always use line feed (LF), not CRLF +*.sh text eol=lf + +############################################################################### +# Set default behavior for command prompt diff. +# +# This is needed for earlier builds of msysgit that do not have it on by +# default for csharp files. +# Note: This is only used by the command line +############################################################################### +#*.cs diff=csharp + +############################################################################### +# Set the merge driver for project and solution files +# +# Merging from the command prompt will add diff markers to the files if there +# are conflicts (merging from VS is not affected by the settings below; in VS +# the diff markers are never inserted). Diff markers may cause the following +# file extensions to fail to load in VS. An alternative would be to treat +# these files as binary, so they always conflict and require user +# intervention with every merge. To do so, just uncomment the entries below +############################################################################### +*.js text +*.json text +*.resjson text +*.htm text +*.html text +*.xml text +*.txt text +*.ini text +*.inc text +#*.sln merge=binary +#*.csproj merge=binary +#*.vbproj merge=binary +#*.vcxproj merge=binary +#*.vcproj merge=binary +#*.dbproj merge=binary +#*.fsproj merge=binary +#*.lsproj merge=binary +#*.wixproj merge=binary +#*.modelproj merge=binary +#*.sqlproj merge=binary +#*.wwaproj merge=binary + +############################################################################### +# Behavior for image files +# +# Image files are treated as binary by default. +############################################################################### +*.png binary +*.jpg binary +*.jpeg binary +*.gif binary +*.ico binary +*.mov binary +*.mp4 binary +*.mp3 binary +*.flv binary +*.fla binary +*.swf binary +*.gz binary +*.zip binary +*.7z binary +*.ttf binary + +############################################################################### +# Diff behavior for common document formats +# +# Convert binary document formats to text before diffing them. This feature +# is only available from the command line. Turn it on by uncommenting the +# entries below. +############################################################################### +*.doc diff=astextplain +*.DOC diff=astextplain +*.docx diff=astextplain +*.DOCX diff=astextplain +*.dot diff=astextplain +*.DOT diff=astextplain +*.pdf diff=astextplain +*.PDF diff=astextplain +*.rtf diff=astextplain +*.RTF diff=astextplain diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md new file mode 100644 index 00000000000..13d002d9a05 --- /dev/null +++ b/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,10 @@ +## Runner Version and Platform +Version of your runner? + +OS of the machine running the runner? OSX/Windows/Linux/... + +## What's not working? +Please include error messages and screenshots. + +## Runner and Worker's Diagnostic Logs +Logs are located in the runner's `_diag` folder. The runner logs are prefixed with `Runner_` and the worker logs are prefixed with `Worker_`. All sensitive information should already be masked out, but please double-check before pasting here.
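For issue reports, a quick way to pull the most recent runner and worker logs might look like the sketch below. This is only an illustration, assuming a POSIX shell and that you are in the runner's root directory; the `_diag` folder and the `Runner_`/`Worker_` prefixes come from the template above.

```bash
# Print the newest runner and worker diagnostic logs (most recently modified first).
ls -t _diag/Runner_* | head -1
ls -t _diag/Worker_* | head -1
```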
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 00000000000..52fca6c9ae6 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,52 @@ +name: Runner CI + +on: + push: + branches: + - master + pull_request: + branches: + - '*' + +jobs: + build: + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macOS-latest] + include: + - os: ubuntu-latest + devScript: ./dev.sh + - os: macOS-latest + devScript: ./dev.sh + - os: windows-latest + devScript: dev.cmd + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v1 + + # Build runner layout + - name: Build & Layout Release + run: | + ${{ matrix.devScript }} layout Release + working-directory: src + + # Run tests + - name: L0 + run: | + ${{ matrix.devScript }} test + working-directory: src + + # Create runner package tar.gz/zip + - name: Package Release + if: github.event_name != 'pull_request' + run: | + ${{ matrix.devScript }} package Release + working-directory: src + + # Upload runner package tar.gz/zip as artifact + - name: Publish Artifact + if: github.event_name != 'pull_request' + uses: actions/upload-artifact@v1 + with: + name: runner-package-${{ matrix.os }} + path: _package diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000000..3ce91e72ed5 --- /dev/null +++ b/.gitignore @@ -0,0 +1,23 @@ +**/bin +**/obj +**/libs +**/*.xproj +**/*.xproj.user +**/*.sln +**/.vs +**/.vscode +**/*.error +**/*.json.pretty +node_modules +_downloads +_layout +_package +_dotnetsdk +TestResults +TestLogs +.DS_Store +**/*.DotSettings.user + +#generated +src/Runner.Sdk/BuildConstants.cs + diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000000..fd74dc7cb92 --- /dev/null +++ b/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) +Copyright (c) Microsoft Corporation + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 00000000000..f586f61b74f --- /dev/null +++ b/README.md @@ -0,0 +1,31 @@ +# GitHub Actions Runner + +
+ +[![Actions Status](https://github.com/actions/runner/workflows/Runner%20CI/badge.svg)](https://github.com/actions/runner/actions) + +## Get Started + +![win](docs/res/win_sm.png) [Pre-reqs](docs/start/envwin.md) | [Download](https://github.com/actions/runner/releases/latest) + +![macOS](docs/res/apple_sm.png) [Pre-reqs](docs/start/envosx.md) | [Download](https://github.com/actions/runner/releases/latest) + +![linux](docs/res/linux_sm.png) [Pre-reqs](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases/latest) + +**Configure:** + +*MacOS and Linux* +```bash +./config.sh +``` + +*Windows* +```bash +config.cmd +``` + +## Contribute + +For developers that want to contribute, [read here](docs/contribute.md) on how to build and test. diff --git a/assets.json b/assets.json new file mode 100644 index 00000000000..a6cfba2f00b --- /dev/null +++ b/assets.json @@ -0,0 +1,20 @@ +[ + { + "name": "actions-runner-win-x64-.zip", + "platform": "win-x64", + "version": "", + "downloadUrl": "https://githubassets.azureedge.net/runners//actions-runner-win-x64-.zip" + }, + { + "name": "actions-runner-osx-x64-.tar.gz", + "platform": "osx-x64", + "version": "", + "downloadUrl": "https://githubassets.azureedge.net/runners//actions-runner-osx-x64-.tar.gz" + }, + { + "name": "actions-runner-linux-x64-.tar.gz", + "platform": "linux-x64", + "version": "", + "downloadUrl": "https://githubassets.azureedge.net/runners//actions-runner-linux-x64-.tar.gz" + } +] \ No newline at end of file diff --git a/azure-pipelines-release.yml b/azure-pipelines-release.yml new file mode 100644 index 00000000000..25ea0aa2afc --- /dev/null +++ b/azure-pipelines-release.yml @@ -0,0 +1,177 @@ +stages: +- stage: Build + jobs: + ################################################################################ + - job: build_windows_agent_x64 + ################################################################################ + displayName: Windows Agent (x64) + pool: + vmImage: vs2017-win2016 + steps: + + # Steps template for windows platform + - template: windows.template.yml + + # Package dotnet core windows dependency (VC++ Redistributable) + - powershell: | + Write-Host "Downloading 'VC++ Redistributable' package." + $outDir = Join-Path -Path $env:TMP -ChildPath ([Guid]::NewGuid()) + New-Item -Path $outDir -ItemType directory + $outFile = Join-Path -Path $outDir -ChildPath "ucrt.zip" + Invoke-WebRequest -Uri https://vstsagenttools.blob.core.windows.net/tools/ucrt/ucrt_x64.zip -OutFile $outFile + Write-Host "Unzipping 'VC++ Redistributable' package to agent layout." 
+ $unzipDir = Join-Path -Path $outDir -ChildPath "unzip" + Add-Type -AssemblyName System.IO.Compression.FileSystem + [System.IO.Compression.ZipFile]::ExtractToDirectory($outFile, $unzipDir) + $agentLayoutBin = Join-Path -Path $(Build.SourcesDirectory) -ChildPath "_layout\bin" + Copy-Item -Path $unzipDir -Destination $agentLayoutBin -Force + displayName: Package UCRT + + # Create agent package zip + - script: dev.cmd package Release + workingDirectory: src + displayName: Package Release + + # Upload agent package zip as build artifact + - task: PublishBuildArtifacts@1 + displayName: Publish Artifact (Windows) + inputs: + pathToPublish: _package + artifactName: runners + artifactType: container + + ################################################################################ + - job: build_linux_agent_x64 + ################################################################################ + displayName: Linux Agent (x64) + pool: + vmImage: ubuntu-16.04 + steps: + + # Steps template for non-windows platform + - template: nonwindows.template.yml + + # Create agent package zip + - script: ./dev.sh package Release + workingDirectory: src + displayName: Package Release + + # Upload agent package zip as build artifact + - task: PublishBuildArtifacts@1 + displayName: Publish Artifact (Linux) + inputs: + pathToPublish: _package + artifactName: runners + artifactType: container + + ################################################################################ + - job: build_osx_agent + ################################################################################ + displayName: macOS Agent (x64) + pool: + vmImage: macOS-10.13 + steps: + + # Steps template for non-windows platform + - template: nonwindows.template.yml + + # Create agent package zip + - script: ./dev.sh package Release + workingDirectory: src + displayName: Package Release + + # Upload agent package zip as build artifact + - task: PublishBuildArtifacts@1 + displayName: Publish Artifact (OSX) + inputs: + pathToPublish: _package + artifactName: runners + artifactType: container + +- stage: Release + dependsOn: Build + jobs: + ################################################################################ + - job: publish_agent_packages + ################################################################################ + displayName: Publish Agents (Windows/Linux/OSX) + pool: + name: ProductionRMAgents + steps: + + # Download all agent packages from all previous phases + - task: DownloadBuildArtifacts@0 + displayName: Download Agent Packages + inputs: + artifactName: runners + + # Upload agent packages to Azure blob storage and refresh Azure CDN + - powershell: | + Write-Host "Preloading Azure modules." # This is for better performance, to avoid module-autoloading. 
+ Import-Module AzureRM, AzureRM.profile, AzureRM.Storage, Azure.Storage, AzureRM.Cdn -ErrorAction Ignore -PassThru + Enable-AzureRmAlias -Scope CurrentUser + $uploadFiles = New-Object System.Collections.ArrayList + $certificateThumbprint = (Get-ItemProperty -Path "$(ServicePrincipalReg)").ServicePrincipalCertThumbprint + $clientId = (Get-ItemProperty -Path "$(ServicePrincipalReg)").ServicePrincipalClientId + Write-Host "##vso[task.setsecret]$certificateThumbprint" + Write-Host "##vso[task.setsecret]$clientId" + Login-AzureRmAccount -ServicePrincipal -CertificateThumbprint $certificateThumbprint -ApplicationId $clientId -TenantId $(GitHubTenantId) + Select-AzureRmSubscription -SubscriptionId $(GitHubSubscriptionId) + $storage = Get-AzureRmStorageAccount -ResourceGroupName githubassets -AccountName githubassets + Get-ChildItem -LiteralPath "$(System.ArtifactsDirectory)/runners" | ForEach-Object { + $versionDir = $_.Name.Trim('.zip').Trim('.tar.gz') + $versionDir = $versionDir.SubString($versionDir.LastIndexOf('-') + 1) + Write-Host "##vso[task.setvariable variable=ReleaseAgentVersion;]$versionDir" + Write-Host "Uploading $_ to BlobStorage githubassets/runners/$versionDir" + Set-AzureStorageBlobContent -Context $storage.Context -Container runners -File "$(System.ArtifactsDirectory)/runners/$_" -Blob "$versionDir/$_" -Force + $uploadFiles.Add("/runners/$versionDir/$_") + } + Write-Host "Get CDN info" + Get-AzureRmCdnEndpoint -ProfileName githubassets -ResourceGroupName githubassets + Write-Host "Purge Azure CDN Cache" + Unpublish-AzureRmCdnEndpointContent -EndpointName githubassets -ProfileName githubassets -ResourceGroupName githubassets -PurgeContent $uploadFiles + Write-Host "Pull assets through Azure CDN" + $uploadFiles | ForEach-Object { + $downloadUrl = "https://githubassets.azureedge.net" + $_ + Write-Host $downloadUrl + Invoke-WebRequest -Uri $downloadUrl -OutFile $_.SubString($_.LastIndexOf('/') + 1) + } + displayName: Upload to Azure Blob + + # Create agent release on Github + - powershell: | + Write-Host "Creating github release." 
+ $releaseNotes = [System.IO.File]::ReadAllText("$(Build.SourcesDirectory)\releaseNote.md").Replace("","$(ReleaseAgentVersion)") + $releaseData = @{ + tag_name = "v$(ReleaseAgentVersion)"; + target_commitish = "$(Build.SourceVersion)"; + name = "v$(ReleaseAgentVersion)"; + body = $releaseNotes; + draft = $false; + prerelease = $true; + } + $releaseParams = @{ + Uri = "https://api.github.com/repos/actions/runner/releases"; + Method = 'POST'; + Headers = @{ + Authorization = 'Basic ' + [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes("github:$(GithubToken)")); + } + ContentType = 'application/json'; + Body = (ConvertTo-Json $releaseData -Compress) + } + [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 + $releaseCreated = Invoke-RestMethod @releaseParams + Write-Host $releaseCreated + $releaseId = $releaseCreated.id + $assets = [System.IO.File]::ReadAllText("$(Build.SourcesDirectory)\assets.json").Replace("","$(ReleaseAgentVersion)") + $assetsParams = @{ + Uri = "https://uploads.github.com/repos/actions/runner/releases/$releaseId/assets?name=assets.json" + Method = 'POST'; + Headers = @{ + Authorization = 'Basic ' + [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes("github:$(GithubToken)")); + } + ContentType = 'application/octet-stream'; + Body = [system.Text.Encoding]::UTF8.GetBytes($assets) + } + Invoke-RestMethod @assetsParams + displayName: Create agent release on Github diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 00000000000..8706b1fb89c --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,95 @@ +jobs: + +################################################################################ +- job: build_windows_x64_agent +################################################################################ + displayName: Windows Agent (x64) + pool: + vmImage: vs2017-win2016 + steps: + + # Steps template for windows platform + - template: windows.template.yml + + # Package dotnet core windows dependency (VC++ Redistributable) + - powershell: | + Write-Host "Downloading 'VC++ Redistributable' package." + $outDir = Join-Path -Path $env:TMP -ChildPath ([Guid]::NewGuid()) + New-Item -Path $outDir -ItemType directory + $outFile = Join-Path -Path $outDir -ChildPath "ucrt.zip" + Invoke-WebRequest -Uri https://vstsagenttools.blob.core.windows.net/tools/ucrt/ucrt_x64.zip -OutFile $outFile + Write-Host "Unzipping 'VC++ Redistributable' package to agent layout." 
+ $unzipDir = Join-Path -Path $outDir -ChildPath "unzip" + Add-Type -AssemblyName System.IO.Compression.FileSystem + [System.IO.Compression.ZipFile]::ExtractToDirectory($outFile, $unzipDir) + $agentLayoutBin = Join-Path -Path $(Build.SourcesDirectory) -ChildPath "_layout\bin" + Copy-Item -Path $unzipDir -Destination $agentLayoutBin -Force + displayName: Package UCRT + condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest')) + + # Create agent package zip + - script: dev.cmd package Release + workingDirectory: src + displayName: Package Release + condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest')) + + # Upload agent package zip as build artifact + - task: PublishBuildArtifacts@1 + displayName: Publish Artifact (Windows x64) + condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest')) + inputs: + pathToPublish: _package + artifactName: agent + artifactType: container + +################################################################################ +- job: build_linux_x64_agent +################################################################################ + displayName: Linux Agent (x64) + pool: + vmImage: ubuntu-16.04 + steps: + + # Steps template for non-windows platform + - template: nonwindows.template.yml + + # Create agent package zip + - script: ./dev.sh package Release + workingDirectory: src + displayName: Package Release + condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest')) + + # Upload agent package zip as build artifact + - task: PublishBuildArtifacts@1 + displayName: Publish Artifact (Linux x64) + condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest')) + inputs: + pathToPublish: _package + artifactName: agent + artifactType: container + +################################################################################ +- job: build_osx_agent +################################################################################ + displayName: macOS Agent (x64) + pool: + vmImage: macOS-10.14 + steps: + + # Steps template for non-windows platform + - template: nonwindows.template.yml + + # Create agent package zip + - script: ./dev.sh package Release + workingDirectory: src + displayName: Package Release + condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest')) + + # Upload agent package zip as build artifact + - task: PublishBuildArtifacts@1 + displayName: Publish Artifact (OSX) + condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest')) + inputs: + pathToPublish: _package + artifactName: agent + artifactType: container diff --git a/docs/contribute.md b/docs/contribute.md new file mode 100644 index 00000000000..4546f21352a --- /dev/null +++ b/docs/contribute.md @@ -0,0 +1,41 @@ +# Contribute (Dev) + +## Dev Dependencies + +![Win](res/win_sm.png) Git for Windows [Install Here](https://git-scm.com/downloads) (needed for the dev shell scripts) + +## Build, Test, Layout + +From src: + +![Win](res/win_sm.png) `dev {command}` + +![*nix](res/linux_sm.png) `./dev.sh {command}` + +**Commands:** + +`layout` (`l`): Run this the first time to create a full agent layout in {root}/_layout + +`build` (`b`): build everything and update the agent layout folder + +`test` (`t`): build the agent binaries and run the unit tests + +Normal dev flow: +```bash +git clone https://github.com/actions/runner +cd ./src +./dev.(sh/cmd) layout # the agent built from source is in {root}/_layout + +./dev.(sh/cmd) build # {root}/_layout will get updated +./dev.(sh/cmd) test # run all unit tests before git
commit/push + ``` + +## Editors + +[Using Visual Studio 2019](https://www.visualstudio.com/vs/) +[Using Visual Studio Code](https://code.visualstudio.com/) + +## Styling + +We follow the .NET Foundation and CoreCLR style guidelines [located here](https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md) diff --git a/docs/res/apple_med.png b/docs/res/apple_med.png new file mode 100644 index 0000000000000000000000000000000000000000..467a59ae488ba7884dd954860bf4af8e877b4d52 GIT binary patch literal 1394 [base85-encoded binary patch data omitted; this and the following binary hunks add the PNG images under docs/res]
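As a follow-up to the `layout`/`build`/`test` commands in docs/contribute.md above: the CI definitions added by this patch (build.yml and the Azure Pipelines YAML) also invoke a `package` command through the same dev script to produce the distributable runner archive. A minimal sketch of that step, assuming a Release layout has already been built and that it is run from `src`, as the pipelines do:

```bash
# Produce the runner .zip / .tar.gz; the CI workflows publish the resulting
# {root}/_package directory as the build artifact.
./dev.sh package Release
```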
z+-h&N-k9mL6sh1Rj^w^)d0zI(8c)d(N6pzhCR!<3t)|T?j|O6zEKZ&uTl@mG5D8mG znT^-o&{4=LtK5D;n>si*6G48^G=_}L?ias_Ta;sSCa#m49xN$ua^z?on*IaD8f&v3 zyY=6+8Ojqbzp<)g_sa~!F6aV+&K&Jb5R>+kl!U$u{_?3@zw4mam?U}4xlQua?ClvZFE6LEaV(OT{_JDifgnk>trmvPbp*1?)JF{AfIP;Xaw?m=8kWq1;mLAH zNL;Ofp4A9TXJ)h?0Gko4=UooY5Oon2fe1-x7tXgTY&hs;JyzszXb?^idQ9!bSTVF_a@ zm8yOj&B?jbceMYj&e%*kY6aPHyPcc|ixaF7i9E=reP=a8qw_)o zfLWn3z0<-P6>bxZQ1hRA^QDKn4RSP0!mX_SFXjoEy(iJMnysJz=w3NXEEBU4#x*Bq zDSnOg60Y?sAJS{{cFjeomygWQ$S43ufjf9nxlvXM-xlt&TkS zZCCsq*PTM$wYa{L=?QY)A~l)Zt$rUCx7y0o%X$HgYYM>C&d=-b)ASs|+u257WG@Ku zxeqgIzwk2q*81;hLGnVKRVp>mlKcjyO4r}z)^)i=aOhCT$bPN8$+&F;%{21I4D0H* zjW$Ff=r~aS2a?oOtMw@~!3^lnDJ`chIbryyXJ@X}X5ImDUWdPL1iajL_SPU}(ftuN zRYr*nA2g7wNcuIsFhnBx4XvZBWa|#8>T@|+2Tm3Jn4P?2%86`@P$H-(bl8sBfq}sy zEu`jYK^=)%!g7vRO`KexhhheN7UFyK{*!{l)ER7VEkeyy1oPt5mKR^E>6czP$Xnre z9bDO8i7&L``}m{!EX$c#lR2s58gUcBD2exLAEJtXXmx#0SYE0&vn1ME^9*c+y1poX z0wbp9Ddgx5E0hH??1p@B0Z%ofkZ{VktIm8ld!&*wLwDQ6iicrzxn;E$ks)`lm(nqq@43PSfV1zsQ4OTntwo&C>GQ$JGH=`zO{f(Yf&PXIo&wu5LJf0J9MR+TAO zn%=&81+Nh6EYtqfce~Bspk58(VNH1?X zqWSsf2i0;d&k*Mh|FtY^XN zp=$>nKt-sVr*|u$^$R|k4g;^f4<+X&qxocxs0Gxfv@9UKp}_xOv0Vvr^yMGG8K450 z%)2U8D>J9ANW@BpCdPK;SE_OJ(EUnJRaWS1FD{zLs~t}u3<=&IXuwn8+1h*k zx%$O{Y?CHvXV=pHVQ!j*zwkZV`>-mO8hXQBw9zP#CM-X72iqYDT1QQ!K1hGRC-j%OBhD%k==?KYSsbupn>T2@hHS7+ln8HHF! zKDNylbjy#YI>!>b09bjib&FYoj^S{E)HxDxjkd6m`r+_f7CbS&X`yeIVUw4c#cN@t z>3+s6Z&qtt4%d0EX35j%a$Vb+nQ18hVnQE*b z_@f0ZdGv{YNeKG-AQru-tAHei%ymL2BMxH9O*dWUHU&H4j4QC|k~}AfTM`lHXKXkA?Ix_cNm9$^skpnZyjYivv|a3Bc|1f8{SkAT`E3Dygp?#;%zqZRH}5;n zLJ|IV*FdUWsS6LQ;+P^lIhU2U&q+}r%yP27!9-d4KIBYr-meF8i?|NNltBZ~4`G-i;oO&_(^bY* zG4GfTN12LLO4nO#t$0@xp31&ImzMa35K>ZzoWo)90SleAmaozNs2-QURknJ$Mn;9p z>JOisph@M!*tK~pRF!-_w}!anR{I5$T|)v_BBbdT-d^bN<*BXJ~o!R zt}tqIX?VQ6WtbSvw+1ni3>kG2;s}%qh%9N+s-QXYOWXO;K%o{=vs?6J9o9iyrsXUz zkE@mpO|QXTOwY5#TZdr*rK3<&xmr#+xBjiaMQHTNc_d;U?Kfke3icfg407mO%|TPh zA`^*@T|j{6kl@pvwkGxyngZTwi^e7Jusorv$|YrGjA1QF@gbz1p>c6oH@COP_kG%` zWiueseEY8zW6`GNzyG4;vU`6z;te`H+`6r};DcNRC{qCU$Fr3cX_Ttge=TVFkk)|3|f4pbA9)6)r( z%5>_zqB=sg#JSO%N}#9vcY}ZOYPEgY`}7A;dK6+yVom@64Xh#G_SggkL1R)(hG!=g zbT(oHZ(yRWZi+RI#!I+UqyLTnSF8=IT`p6P-yq`}D(Xu{CjGF|;-Z<{-`6LWjiWA; zMt3q!>Dc?m#M_&A&;gvrmhF3A4Mk4XDDjq-Ms89I&)%J2LcTWC-^5{kb7DC@Sh+OOS3+})#dV3doF!ITRA(Salsm(zzLea zm?D5rBm)=|Knh;Ib^%akf0}&{f%}QN#8f+KUrH}0ScIY(c$9Q(n`yKrcXy3UOoC*J zMDmnAfXEy09=%Qk*V`b>t0)QIEQQGE8j!S1W(M~^n>zLrfm@wY)nZdrdY}#%M}4yA z?A6WAC(D;_NJl>y5)|N_jqi&03o@V|Sw}@{lMhs(QW(b}D`3dg&DYtI;CW@D2!tBc z6U2q#4WlDy?~y)#EwW&`%;Z?LIGGjr8)qhkIqVr{1KQ5Hhz1#?znE!HX$&#y>$rVm zuhL{5Dd!g`b#G@2nuME3bx=GWvFxXCOkNRG@!z*+KU*A_l;V2gsSzY(bmxA&5RK-w z+&Y+cKJuIg6if4d6>_xm`|o{5@hBbKJGUC}%!nfIy0v0e*pzugwKaZTc^`%{%qYzo z55kzotIc3O zMV{ecoxm4l`C$>qJ&XtZ{7+Jgun~$#PTNasXT%;X=@t7tC6V0N(jtj`aZ7)X^f0l= z8je0f9Zk*QZf4g_N(X6#^$MlhU*GuRjJ!<==YxjK5v@+DBAff)?ie} z`q?zBFaGZ$qB!!f`22fG!KSP&XJxCKpC%C^+)^w|H?$J3J%1aT-zqsz(quH?;h+YW zQ8GMls_u$Hk{`_9(bUvd4#kZpU@|shq^=XCe2w)T(=^bzypZ~B{w*Z0^p4Zot-1SXNI(mfp&%f8Jru0*F?blk zyKYw6CAi&-;`EyL`zJ>jMVg@&^jpTyD5TsOBXe`u(>hCEs4h98f&68y-N=iaNicmq;uL7iKD}GR}#{>ndK#qu0fyu4#uAnp8@1tv0 zmbe!7CLX(3rY2llCBMGRga}^32^?7mcs7zRwgP!i({f}Wq!B=waZu>uFa)=cj z_csFr_gCBbD%Imx219U4v&4JsR$4eqO#*s)M0yLg_mCMT7d>J=3R5o#%+AdX#z~#0 zol}4?=Co(nDTp+7uL`dh_K@VRy#oPpOqy{wE%`3nUHFI)?Q-al1xeVzCEx2-4c2J9H zdu99JjpBx>agHFwy%xr1{4-^dB{B?=>19uUugJ4?yJbY*2shy&2&NIHA$2gUQ*`HX zWi4H>tcUc&lJ$b-iwV3&Ep^W7sfG(&JPRec(A6R0^v6cQm{?N6?y0QViqzIUxeDuw z)M_@AD8t3r7Joh{zoKl(WQ(?k$q?xTYx<&P^9M@~{I z>`J}RMqR9y@Uo4M)Co``)DV8dF+4B2_ZJkKTU{<;Gpp_iN)3h z;{=A{ZW3`RuQj|FhXy%(+LB7Aq;+Jv(?_D{rXw#OeXK3SHJ 
zIJZ*4U+O6DR=cqfEZ7$>;*v$nNXS(>e`^kYK;a+Bqh|W5fTba8sVAPFD{RpG@*p<; zD1P+i4CQ78iqY%9(5j??S;A6I6=}*}*eEF?i6*9@p<$sGTC5#-)QqpF{e9Sy>jZIB)Du60sE;(XeRTMDi>->9Fny+W)SH)1 z5qXEV%`S@cNmo;6XStiyEGv2N#1L)lS{tvL${5qhm6a?F4@dy{<3YK;yGoVj9~ZM) z1?i_)r-#1GQo1!taN)_IMM>60?~W(QQ_SnO&Eph(Uipz0mXs98{6{?qZm{tS?(%VN zjoK<*1=9)y?-bhgO8}6&mSq#tnn);)1&9pAazP93IH?N$@z1)6?^i^K=S<^OaGsjo zTg^gz73m?y6l30Ok`fXGgoGl+dL4;IDvw8E3Ek2N>`yqEN@u`)eJ`IC9S={tupMTu6YzBBd+IVAB7+!P9(7>Q5DvTDcZ4Si1BM11KN>8NU>a+Xq z8s9Rr|4BWql4@=HrYyIfOLiqM%EYYv)hVkNpYdJp)5@@CA~Ps>8yUeMDY>lisi&C? zD*_RDTE$XHgit)J4JcZq6p`KaFnLaN;zol8GTZN=HDk=HJx0bvbMpKs7NnG4ZqdIHWDNqC^KStp}Yr zL4OlsBC9w1*i2Evev+t5 zXNf&$L=5fAQ7`f<+Qi;7DWbxHNPc-$Xae$&dnI0v-VfK?YjfrtwC7QfV3_7l|G^MR zEcp!P)(~~7zGNf_r^xmEFYZ1pRe^9xmJcE5$^t%hi6QU{HbeZ^jPmjtP+)Vi$E%(*@^(1w`#VTRf8Ige}wz?gTF>abyoWhoY-BDGy3a-Gj zg&Tw!8AW6J8E9}~e(JS*y4doJ!h;lX6P>uB#cy6(a1c(Ie@cAHEjqiofxt+1p`c8G zdheUxk?dnqU)ebbgDj9@FJ`#?9&_&-9c#^fN#Eitugqhmkfz8}R- z&dl@&Afb_F|GPgWyXz+NzIsMNA+x!PuoIcwuJ#pECKF-d# zBQ@N=f^fPTP$VR1wRqWcCIAF&qAOi_+*YYjIUS6K@3;|0r*9_V1Ww(G>}4^07r!Ey zTg+2%ZxIM<_ye`OzME&=lDcE=G{JO4sE-pvrjw;s3iZionfVgOxr2AFS&YoP`AMAy z7W7f(c?a{im#4oF`(eka^^w@ImH7Nc-+R~1la-aK@ps_7CG_CG)_dWN{19$Kns&2$ zX5)LOgh(f2%a%jf9(nUZ z9np#?r^4s4`z-1NL_{)wBC`ypui*7Fv*hX{t_3!rzO$r6k@Cl8mFc^FRtT3jx1tRn zqBRj~o}mp-c+RPAA-#K-d04hSZQ;LkcwK_R80}Cep&5`|06~RH$x8*rqdLD^sw|i? z+EGEVhNId<#5X9{h?_LV_DG_ViOIo@5%`L?`mP#^X{hRg$YmC#77*3~B!nr0#+E2p zj=#8gKE$h`(_2&G;V=d#zKs_tNrth^YQE z`l3;)7H`uMjRk@4vnB(AEY?V<<^Zl}Fg{~|=F|YC2(~eeM^8{+lnrWL6duGfUnY!_ ziKfd%%YEv2lxGvsbtzqddw){=Vz%W=R{MV*C)hBFm*O~0(Yfo4Ep<^umYMS(mx|B- zPKGkS(tD4+$yn6%WbsPnv`VILy@L0v4yS!=+mW|xIeRj+;zg+wS!jK+CRd+`T>lk9 z_9x$IT)}%WppW-&3$wVEiMe^`gMUA61gV*UH|vUPUoulV$R4QLV41buaVk}?HJM@P z_4YzaCoZ~~h|-eEPwPvQN8r4Z)d?_LE9;=_f20chkKUFvz+-4D3=!V>Ys!C_R7uC+ z%*rn)S33DUR$qwTjsy@;95q)NbdhI<_cTDs(cn#6I!o&wyKULxKi`N43!PV|Q3SWKQ857Y)4p zg>J&Q2`{i#IE7NXnMl&l?6(6A7gV=L)a9rlD*W=AJ>em$t_PcKXx1KrR#jz_&n>|i zwe>IJMiBc)hJ-u2-^1N&Wy?d1hjk1=@69N#2cL!(>}ysj2wi;9(UM(fmPY3>UeE(~ zT)4$sY(LkLnp6U^L96;y0XNd^&bzJA6jO@Ty`U-TH^h;&H#{QMf#k{lg2B+nFhCJUv4H zWfGgQ7qUgYcB0B&p|c){b461&*epqWk@~0j-Ra9mKbE$m9zTwPrYFf2q|@z}Y=_6M zV0_;{!-u|Jz8ied?c@xCrmg5(;Cw@*`=;dL8M?GrMIz5iN?{#sY*+8_hdE@GRKa_% zmAfU0n%$M;IpUkTu#Huwx^@UXXZjmyjLC~QLSl=&0BW_q6}ROfuy_v~50Z_e&{cF_ zF5xnmg5x{4mSq9twN<$PyL{OGYUma`7>Ne@_#h6S#n0=(+~FqYxR_vW)8jLr%Ks}J zZu3f5>NoCWOV`2Ww;~jB8%$_&2}81XzTPne+k?ce`M{;aav^-|R(F?LcY)uEmMTIhb?mmVCm3*-~ z+(L3|q%vH#6b^LzjFEzFa8~F7QDNjBufVn>!=xp-Zx?`r;AaEzj|7N#e+HYJ|9HJpTM`j$d&GikYZ6n?hE@YNWF<+W09 zA5OS=eze|BNf8sjHNWZf=8U<#SfW7Kfo5I0OS(X9YN+VV{eA|udBj9==@$EO0%Kp; zJi4`bUMngygb5aeEgQI|I&|%X7>7VZS=FtKpfdwXJoX9gTFaTQd}b3tkMA|^w*Bt!&$)DC#Mbc`r2SyH(VtlEHvD+bqqddp z!&QUcp`%Z7wnn%TOjy5szq){Q%1`ij`>L@Mr87`*kL!*Xy^SHue?PP{D3EO zb>OOf#f6g04|IhaAWd``@)6hX!Uz<+DGq@u&0un7B`SQ=gj1H5F)As3{hJrruM(kq z`g^DCDa(4;oF%Bn>&}7lDrmTpYPY(XMLXfj(&&x{a?=tL85@~U_`NV)I2O z9Pa)0{u&0j2#PW&aAYM{#Y!0&AsPvdT(Ph4rjZz%2_s1Lxs|F88;r*r@1Z}|ifV<{ zn*Rei04YDZh3vnC-^c8dx!#P3@&b2a+02H$fOG9F$A}*v=j9f7Sp9nCc5{HR*zFGE( z=*@Y%*JJB2?Q-z=@z!KS${5nFff_U87EH~ z11kLZU@31w?nJM+3^9lzGG;R-W_$A;bSvvzG=~2Ka(Qnlke%@{Ov;Je&CQJ}X#L5b z-umOO&6AmCV5rk+D(mn%;!>Z!3bP@ITU!3@I8+WCKF} zM2V3VVe2gW-GoimCF}e>WZyUF!**rrdXeu>pYu zk>H#6-~6A<{kLhFBmX&nP>DJm+nVtq4ZgMf6MG}kfG{u6gumpFpj#}p&%w(bhKeWk zyO7|RdPG-yi6{zn+L?A?Ut<1jEK5;VF*lPSAG6d5!uib}VnoCg5wu7t!{j1v_{$;+ zzacCiCEpxj3m3A@L_HvFUEv@mY$29v(h^AhRV@N0?UXLzwp860O_RKV(g>HjYYfI7 zy2(i^HU?q0Wmx_fPWh$Y6w(gUUrXd+v10DkJrUSs zQg}Ljwyzs2QwSiYq0T;qw)-0C&R-#ix|Va=o9;;IHaQZtKANOo(sX3DJ4;Zv)1WiA zQz@PMdxyo!lfbZgS$Q#*vPm&DIO@2%1IZPh_9_42kf#CVpTodNr=w 
zc|=}ByIhH8n7^{kt@dJ~3XGX7KYXTzyfla}h%-shVlvr)+v^a4^7{Q{hS;|syK6jQ zp2F-_Ge*t^Vd`2nOJt@Z_4ZccyI;jsKYjWXi#dn|c|Da$@X>w_y4c!SaB15^y7b(O zi@n2#%pAyErvMDMuXYQ_JJYDsp*g z>=icS2uh=%%L3F|yVFTxR~GbD0{G*ulX3ZA2eU@>aBfCj`cf5TCXL$gol%B^kg4UX zz_Rz{iZ0L|Y68ght;0x?$;V3EcApjReGt?SG@2WAxX^F+JYP&u6y)jn*RpqFl9+~6 z_5K4Xv)ARn{tNz8De-~lfAVjhA!XOf$O~gq)28y{5&`v(yOI@Xm;4wI^ZeGXyKJ&S zg~fAw_l#+2ApE@{JVC|gb+Ree4TVUlS-;0{v~-Khn!km>rk&`97E zjjYLZclz#imX2lg=5%h#*J2x#Qf4ref@4@GpRJ=VJzu59MWbnW~?Zvpl)cW zT153Gv(Qk6C06`3@WGt~p^}67QR~W3GQ`AuNCi!teJGMSRzN6bP!no+9HLDDJO&H5 z9W7;pS@#{{6{5TO#&K3yjy3Hp2BlO1Oho4`B!yh1E-*H$_3}ZR^(3o^<5b04qzdQ| z4L?0e-uKQu?W#426I2G2aht){x4nkCS#@A)^K4(;yY*$oDSQSz+kq;>^EKrk!uP~8 zTg;neli?l`$;Y>&W`a(`-;el6o3^vPo@Hp#!!=k*d$xg0BTdK;E2ZrQ?@Ck@()o%~ z1c_oL8~I!ADrvDL{)7xwU5^C0i#G8)82=Jw@;b6$a@Lg+*8tsotq;Z^aeKT-qwHhm zb5+a}8r|7FZoC8>NeUYfJV0n{M3M!IfSPeF(Ptz!-o@Uxeup2!Eg#W9xKMv>y zAY0qT6R%$&)O52O{)~T|!Zt+edCu)}C)O?1-7g~F9Dg1sf~9uG%JNB6??joqpJ2#> zdP{E%(`igC#Dxuq*=^2zF2OtL|F@Yl-8aJO-Wy~}&I%3#qsjNvmN9_vZmHSrb9AnB zXm?w0o$bVj};|%N6jZ@o{rEnHw498_=6Z zMMtAQ8GNumQvi`Ac$`>4>1>YPP0vkb<@f&DdnezI&_%eSu9+G@4C#|(!terp)GXPo z>Enmw2oUR3zn|cwZ2riW50xmwE}60X#V-p_e9x4mjEmK< z6nfIJU16C7VuUpor=E3kiz3?45FEvywvEU9F#rZM#77N+!Q(Px$RYM6+6jE)UX}+c zG8EHl{+d>x;btxwa>$Z&JK+lHmeXAYww-oB)}>v&U0_)q@bvU-K%@AgUKY6J=uzI-=mwl_OEW7g0`p+_hh~L3 zazww857-cOpiJ5vS4-U6bf5+q2GgW4HQ}E&m;Yg{n|Y{~(>A7rMTb^WqQiu8I@M2^n^1D}Y1FskAkYX7wm<`z z#0}y3lWGh}mj@fgUZb_q{U3Sv8A{NrQ)NgpLHNpg$2B*4py`Yunk z51{0RpHv7(T*9Q`X~0B!+w_@6#|i z_nV2(_dM>apVQZQZHFY4R-VZ7Z9U=96^ZZjfXC*Z!AMw{`jl-H`zk?!3Ik4AEIDL4 znlQgA>yOu2Y!3nl(n_(3)y55`ZMo(fC1cl=!GRE8Y2!oJr)?M9f-r!C*Xia~i@!0Y zO2D6lE~_;j$*5uyB!CvgC?6SiQxq0KS`QsS|(B z%h2+8cpB?s1)h41j-Z(|2`L%r@(uJ69Z6e>_Hoq`|8pI;8q+!bWR}dpmn5Mtf^fws znbL~Ym<4f~Xmu4=x^gCX`e&Q*f}sGfT%8*sHj^| zM80A4qz^h&I^$AKXOXA6bGWqF1C57;PRNs$%}FA}DKLQ8-M#ay-E!9lpUm)Nca;q> zR$I@OMO&3d-NFM>8T`clFQ12%8Pi;LOZyTrujczE(2odz_>}OxN*vx>MN3k}4L26s z_$^)d+_2i^cI@n@47!>hYps|ovy0hAO=o8gSNo2wQZh_|*wKoHuG#Z{$BOXQ5}x;Lq?$ZnVMJxn&=t{(ppGTGV?fBA1Eii)H5 zKj;K;R+m~jC3al;RDBiZ6fNbkB=drJ-Nct?sI011_wa+9-p^is;vD?lcjBiDOUtSS~ z=`~od^@lwENm#xakwke;vh$m8inU?$1UUI`)>dz`-8x1hr=EIorn){ek7`1v3w~FA zId@lU9X8%=(VM9GQpV3Uy!R|^9~Km^d)NG!Dt-FJNB0hdzdpkfosfeQvqgOurHgB1 z7|XvhLe=*VlL*Y;#15eV!lSn_{C96abA$kFB|(PYpUP}Nex88)PG#eJp`dl)R^eBe z79KiND(;WnP-@gBawgz1k`#JiHMrYIx$d8R%c{iO`ZMFXxwuBfKJtw4X==!Na@|QeHE_HQj>_(7>e+|9F8e^LrTX|E92|f;Vd( zsE6BKfuRj!*0Np?^r<^4J3IYGy1KeN$4_^`IyesTlLvm#r)%y5XW2sbtJ$=o6QyW- zi(L^Az3p9X56i8Cqu~{)bJAb5SBs{}4~tsTbR>bOj4f@a-dK9SmRVMR%(pu`5&BYS z#7=IuuHBZAziWXs0ZlRk4UXXy2#&1vG{g1lQSk<)bSRbjERg7A>C4TuiZbQggAGIF z7?Br}quy2R|4vA|tR*7Q=1W$5*(aP_$_L~_V6F$7TQ>r>ViU(-OQQe~0Pcn+V5gIP zM^={!8!iu49|e`v_|!(Y^p2sXe#xvKm|O&mrYAtN4lV@Q(g|6moPTSHYqaV-N(}x0 z7jKGC%0U@-q|a~+;}62vif3ZVeV|(Mv>$`Y`O{v`&p!$tlLeq52U_dQPB3!fI!a4|iPHiT2)+TX{8tNL)>hb_o(>Rlx3U6BX0Rv*{?|`d@7cdd_HeX2B`J6@ zw|1N2ce|Z3HzT+nvAj1XRf%PgA7B};3^f#uz4sBQ_kW2k>$gV!9y04*NZ|^b;RRPq ziryP~osWwespOd!pShQRQheU&wsBi^8T;Sh2)*;(GEWpyrFCjn?D?*Ik6QO`Ewj`7 zl(y7Ojf!FwrgylQWtdpV^D@fLx+b`^vK6ZxTd*|VjfH(#{Dh*aswrm* z%@-SQc76)5(6erbJfC8s){4nQur2Lr&H`YG#9REf zv!Zj1_QQJ?pVhRIgOqTS>4EbI-}CR4V?Iw)82Q_54|aegQ?K3Ses&m)iU6YVv-9(6 z?`ucUnBZ!2Ki)tFFrN1s_@Li(baWh`;WK+50R-AZeU?bS-ZDuajMEV898xu>-eA*f z!rgtA_2PDnF5U0(Z0%$>!XW5d%*GAQleW$%P6x*Hic*%A3R_kScMum9kI+yKCg2H$ z4`vHsFeMbpklcRx7--dT^J;e06YVry95v|bfk0q}dDplAI7MsR+==n{+?~u$ul^zy z^`E+LWjv;KY?YZeQt`7$gk$=hZq~Wkl@N=Wi->5HxKt!;IvI!I>3ruf_-HPPTumI0 zb{H1+Taz7Y)xxy)JOQU7E~{{zK@7=xf5XZj=B%vGjo<1AC6p!zX`0IGFl1bN+%=7~ z_RjoQQ~2A;bF&apzwB$*0^s z(_ntr74TX8RGV9~_o;!G<9=hB$Sa0@n5hZQ^tw3ZzI4kO4=al)P={mI`*?RI6(c$& 
z%wW$bs5k~oKQbmtNU~-pHzh|EATdN+NGem7GtnZ2^!GJORjM)|3R5;Yz(Ib_-8vD> z-$*|on4|xd7-7BGlC@{s5>{5(oUHTDBlb;&_Le89{~;Aq#M7wBjE57^$F<<6?yn&` zA-MXa(%#KIdT+NsO^Xp~ym)9u(cv}t>MRwho>XJ96ChsB`w_`3=U7*rxIVH7ZCe=g z0~Jh4Si_CW_va{|wQLcIeL1a`Ks@Z&BsbK#g@Z=LK$=T>U*`-JL>yaJ09U#kAlXjx zX42e#8mD{M|(`X1?3dd-YnW}J82+xR(N zDpo!Y4JDgaTiN3q64ZFlOqPQ|ZdYy!S6PpIpfO-)P;WWIv~%L*1ODMBPvOlSvfz<_ z=lA~KZwRC#Uo5BxXN*ME=w-w9IGc!LW-Pi^zCP=6^Tq`r*c_Iwf z6QmI!%ba1rx7)ZnlX{c~+5QADAtK?b`$P`3V^AJo+RT7|Gk=3xWXZuoEdE&O=AP|3cxKU0j{l@(KdyKl$yf8we-0giGJ|x!hp5T`;|0o3_Bq z+O18y#xd*R$S~=5$lTuJk8S$VAxfXLZ8GZ1?q!hpUmB2F&}cEF%e(F`4uN?pvZUTe z!oJrd1~ne1A3;R5-|_X@W%F;-%BJJ@k9XJGV5sJ|U%!T9DsPAQI*)=`?(PK|S3XA@ z!Ml)ZUbfC4lxwzy;%HR-ZZ5e)FUj^qRgno7XVDre^?dT+GvH6>vP3Bn(kak|E2}&t z{@jh=+-i|KG%nLD`illS z3*<1oTQ@Mx#RE*_xV`OkdyxIcQBe8RvV;-MRv2(_aMXQyjg<7ijdAtcKZCN6#|Mmu zOZhdRgA{TE17<4$lhu2PlOr8heEn{)DO>Vy;}<-*g1`>qpY!uKezR~eCi1U#6#=+n zcm`94xh~S1%~p)P?wBPM|Ff6id{xjJI4`nsD+tP)rofvFbiz-^Y?i}EJmVjQR!;mG z1o}~C4e;B;_h2i@bx%FaS-xI(PzFVor8Y?x{ z3pHND413g{ zjTe<8&mpV!WO;hY*p7ILUvEq8kjdhzB<0`Vbf?ulewj%MVnz93fOq-+PnDv=(OU3w ztcxtBG?PB9P_h=~S2&YC?o>H=HDR1&bw`868XZd8lJ^L1oxiDjL~A9=E9o+Z7c^l- z152WQYXyV1K~z7?gPVS~Na-`RGDj(*D}V6n}??A%t8eia+!fDEzbt&`Y9_ zFpvGis!09-?}`1U71xo&|198F^cJc*pZ}J;70_8WJQH4jiT*D^bC8ASsV%_Q2DWom zp|6NsN?Go0r^z^TtdJeU#m-p4v}Q3FociQ(sZ%}m)m#OlQp2b)17s**gUk{wwG>-k z(Jm>10A3s^i2Y$rBz52ikeMj}s^XV83*1$U#A>+SZusSVt@5)+H)A%CzT9y(o!0S} z=EYXZ9zwb|F?zXQaDH6#da>RIrtr5HH|=;7y#v>8&{#f=0KH_@V#C^e^@Smt@MHNL zNR?zsdwjG8bKFjrGwJUJA8k8L4Vxl*T`ERv0dY#-=`lure9+w zS6Y2#&D2C@n;PdrZG`jx8tP?1^QwH+@2mej@2_b8(_q$YAPUnKy6M$y8vSbz@#9#v z#rZl6!Li+)Ewmm9!B9!8mHKq!bm z4cJ=e^}CX@b4d*UzC8#kJ6{ZDLrc{SdEqOH+fF{SEICWzHhS-?lt}jL zs%4R)9f#tWdfTVWQEFvtnI-EQ)9j2ZhKtlOHE$mVzV2+fbufeb5PwIKkBtY#As z01A9C@v;Fy-HP7g%uG*L7gWcodifj>tv=|Ypw$!m{g61;9j+Lc6I4%X^mg}&APn8Eez|r|P@A^l6uP@^dOkCLZe18M1g5tK ztWGV*nmo^s;qoVK$JarI{Ec)2Hgsm=wand~Ulbn3nT_NMZ8w<_X`CFqWmp3xgDJ|>h#TM=G=g-^*6!CO=SPmXb2-Yg|0$qNA#T< z1e?YEaKB%2=;QA~N0;r6Y25K>|9G^MbsO#VL_qh=lMtc{)};uls5BV~g$G8a&vgDBL(w%+E?koOti@Cz z9RL289=35ePJYkLl7a@S>n;JU3Y}kz69K;#l)SIYZBx)P#7*tx2dO7*j>+3(3@ zq!*j=*g`@B0~U3&ov0MeG2}-i6N?I|o_?-+o&}eBP?i+Yz>u1^gX9CEto-pq)9Hh| z*Mf>@*S`}{y-M5^u>`%!@P7gal)h@a`jO-{?tMu!>k^&{^m)!_Qz{v#o@h1*`WQ-6 z@cZJ#dGI%rPEC<4L_y-OUjHm1Gaz!X(PhUF($r9U>%9B6-_x0{INN|pmu)4POF4Qp zfW3y8J3n3RMqur6_jLO~xIJ42cN*oDvhO3X>k4wr5cU8D%2_x#C_ zYMNQ07+4x~OEvZm56wY*m-nYEu!*wnm;g_*0ITlxSTN!{mlosYl69iupx{N*lhBE37}c;b{I>2Xj)72{x3mE^k37L2SN-yGlgK^cdIBQ)oU==&3DPosk&z%--y=yAM>yVW(2E6+gF2;Fp?iTB;)t6UfI`z%c+OIb@qgi+Ec5^!0JBmG5 z7dm}UIzv-U$HK2XKtz_%G}Gn6>P1uM%c-H)V@e|4Mkgv!b~1<7C_@qf-FVxlxoXK4 zcsXeq4F1XlD(Me98O9bX%`RziL_AKTX34Gq((cy3SHtmp5 z@Y7|t!P^DP%;NRmznec@?oA;l9j;7IPjj2~Mnt(mqQyGBet}fQgVvJ==;YL-?ki*y zJKCms*Bix`f!S8#Z-;8`mp`H9V!9J(yQ3q63oVn-#0D*GGJl}vTM3w=o9I~@Ni2u@ zV&tPwXE26hMe~K{#qC>cnO_K?jF3ARiw7)l)Gx=BKE2l@TC{N_54#G2vSds$eHzwm zyWNat)~^rMV3eGfWw#o|zvlhdg@!^;HLPf_+6w&vrKhW}0R7$~Q%ZfI6^D|CaNE9+ zg*=%4=$A-{(!mC-Nf2X*%{tsbmm-0Pgd7e-rQv72213OMwk_KCOt$XcuSEzXMOVh` z)0vHDM$U4cpraRVunhx83qJZVk-;X zQP)`uj&lKYK)J*7y2bcK``CGCIK_PYelty5@P4DfWI0{cABx&8rpnbwtW8ta!Uj2{ z(LC5y)3xRuLDl^)G(f|8H`>2+WTn5=Y%`UA1`McoNXxY>lpKr9TTdXQB zMl-rtoSHHXc-$hp0b?AS$Q<^NYhiwrm`sTJt7I2BUKJr7lh+i&xA(g(8==Y+h51?d zUyET7m*~sA#cKqVvpAgK2bGg-@Y1YI(5sfq?k;Yd!d6ILSBvi`e!OGZf)X&a%; zu6vJ$s1{IV`79#>h>xrNX7rJ5lUGCr1 z-;-0n0O3N={~>Fg93_MVq%VR{z+@0g8}^yZKRxH=VCghqzkY`GLHq98f^mA?0K}-> z+UVmpjeHbdxfdM&~ z^39J6Ez}jSr-s>CtG$$GP>)9fyRN=#yVRwYh!v)Edr+@}<>iw^ElYt5;o+V(hS-t} zbuk&!z6}A)Fq>>1R${TiR>mn^rzgo>ESFAxbOjZHCWDf6W=v>Dkm}I>lUrbIbxGRK 
zlV+?u3d??Mu@=_>`?qQ4FEgLNfgJv|>-=K6*%`M}asKYdU>rMDulmHZvES-cRbFmC3i8H1 z{Xnen8=Vf{yDuP}&(TU+8W{2biD?HJmS-H9@VDcp^=}|uGNrOs!a0*SU2M1aVR)_}V$JPp!hg*Jpa|bQyH0#o z9z-Wx_79PZN$(?ja4QVj6A{?h&mu$!={fcKmVS0~V#pO}z*^~e_72$vK&kXw%E1tx z$Vnn>y!d;sq6yz@&#)4F^Z4H>^qf)TG5l~~TVmw6$o-kb1s23qa|*ZYl-N{yA0g18uscns3hLL;8n43b<~z$vopKRSkTZWUazPpEtgzQK#n_ zNkTC*cY2btg&K^evM`jIvMy0g5r%Y%x@XRU@ac<$gqfM_MA8Gu$Y0X~G<0l+v)e=2 zht-8bN&Bu*vH6x|fA*odv$%?`xNi{>ni9fVWoXwa)J!)uW)D33v))oPez8|~TrS>d zndRX*t5J`QvGpjkd%`sHAE`A{vw_bv%+JH2+>VGRmf?8wiB}BEE{KhJn#;q2kT9>2 zVqUDDoWp+WW}j^*CYk*m3}djFa1rX^*+qM(6NW7ajmmcy5iu|?&nGNQp5?>rtWK*Z z$dRU|9^|CydBs0)aj~{BW9^$Dh8|3zqK9>n8Qrw)RIwwV=-b7;?9c;Jl~utd{#`LP zv%H|72P94pjYs3MESZDN={xY`T2%qI-T~;ujzJtY@J*WlE7}ZRH(Ij0LFsgEN5ZXv zIO~JiQUIfwWbWi%C{~H9p)rPT0Lcdi9#C>XK~{^9o8ZM`8|MolSJa}iz*CU!E!Z^v zM)g|rwwAArXTe5FStv|S@k!()(A8pon=o6WwLWt2fHxD3Dv_U+nb`~yOm9{kn^wP( z{QrTwZYT47L0fMY5E-Y?sO~-%QJd@LTQR$p>IpKdQ2vX|LA!xiCemKCH z9G6pK7YajaGo`39bjp8OYz`)_V-2KyZ_;qizRBjeKfk-mtQ;?w7 z*VxEi$jQ`DHO;p9T@q9NEzEH8Ho8;|tXY55zDi@3K+3w2m*6r_FT*KlZ!99jSNh7pEq8!e?bVy1?qRmg z2o?Nroaea!Tot65E@T|h!l`gc#w;rJ<-nw|;Ou_IxdC^s8WBplK8dp;Uo!s=JOR98 zTnyDf(}U(Z?YmG)BNW0~b|*Q+sJ{v@;@<4p+5thCty=fV67RVTZjVo0N7hf`<~*q- z>^{J9*uwe@h*X$<7~@)ay#4An-eTi+UIJFZW0l}sE&bQw13kVF*e9?_;w$ z2qZpTaz6EY{`jmtE_j+FI1Vg54rdF1O(NRRlT3bPqoek0AZEE1dAZT3xB*Wl?CZoA zZT0swA{WW(O1i$Ff+>=IbNR2H{b{^-OE!nR=8HuE8vv%X($dmDg-J@AkSJ{$Yu}w& zvgdc}cZ(DL*H`N=&o?t=*{WRIQeses?a-urdDIDG)cNWpr3(2;Lx-a15F|z%z$GuQ zpR*V2`l4`Hz;9&SU(P7!+;+jBeZ2rgQVdB9(DuEL?U~niUhNzp^;z-X2zqVQNmDs^ z3Z5P;NI#>m?nyXt^TWXh3Y!2voIshr)t6O~>I9byN?r!K^7gT`Hh2Ug=5#GaMEAKk+J8{&3Jn*ZFh2YEyxt#rjkJL6X(Ner*LaS zV|T#!23n-1@Cd2fHMDucyl(94xVkeVT2 zLC$CXr#q~LJ_r=p` z1q_N^Tnh$nJ0(Kiv(7IWTcFeNyHr;el%!2!mW|2*L}w{~e? zOamY`KF%|BKD1-0D@7&lzLB5`h06cc_dqzEDvKUETRe@uf=Mv)r%5}zhh3UP+}N0P z39ZHm7B$+6K3caaTzu3wVj@cdpRtycc8Wg3(A(~dO z9g$0@Aof{4JQZmg-xsz9avH^CvLC*(M920Ec^;$EykKJNSIiLWykxRi7DLc@FOq3U z^6K9Dl_k0)es~NwwOl$ZBc%%Ej@ukV+*7(TreV;B8MX{MOjUb^x$rpqPgtSOud z2nUdeAF@#KN5l%#a$z*kV~9!!axQGX942!;G(;;<6pbK9G3VBuQQ$j$O{qInmlKH3 zK+|Fn3mKvKft=cHJoQn4#i;G$BT%AvtpI)Q^*7+aAvz)a;#*Z+9p|#u&+fRydA;W2 z{6^^31hVIP+kUAAw8_N6sRnQHyrw@>qy-!W5L}OXKiV0FxMBBVVPv;Fs772jnPLj zLS3?iRNHMTt|cib6x(w6KYj%4yYrv|LP)87x^C)xngi9lyi9=}Y`%gW?o>qW^$5s-(c*6WGtnCTf*)N;d!VNsY( z$tmF?qO|JNkTjhNvD8q@>|@bEsVnDp=Wtn$`_Pl~+)a}fkmJNEp$G4;f~vMjRfWqY z110BCcr+mVeAa+x^zl!cu*1YWU_Tw=g17YM@$TZr2V^qF{sd?;ofQXxo}S)EuopDX z!Gj}q?ZtS!JOFVn=5a1--n%Xk>7f7iwl;BR&BNUePp9`pz{^bl**B=$ULHCq*F9om zzfQ*K=IufA&1KU-Zicg|xrA|0If7o3{tf_-?kvhg$2<9w|gyzDnJiK08J;C9~XkH(MeE$ofPd!C=5tzWLW}PI5!`Sl6STDs6<_ z>tsb0i_V|z?J)S6PkX`9d76ie9wBe|_HMvGyG{lPl^+O~MyxPsH5<*a{?>BvM>kF@ zJ}w#4R6{bpfp)tP!Jw%XgP2GZe(d?D@h`wo|ffA~byp z*>zSWktGTlgLd+`vQt?Av;}h>94HjettxI zWBc9zI+E^N^Cl>!*=&!6ecX`S{24IEzp{hRuyTCQW#d(re-FNT2>2oyjjK65Y(5aX z4J@)Ao5H&#iG7aA!qsy~lydf$A;{ z7i?)7Iu+|RUt~iW7rZe5FTIcvko^r?W0C7-Ys&fhdi*naygd_mdA#5P7{w7;N8SGp z%d*>P`(@pP5V9T>Y6v(^ppc9V(dCR*{X7|Fs%{hO8PuXx{-)r_Jph54^dR5|KqdOt z&ZyokT4gzY+~|1FX?17dQ%lJtoZ)mCF$^Hf;7vL?aY|)1Hj>Sw-ak?;kODt2GZ?5C zIv%%KwJ9@)LX$?ee}lgb0KKW)*xrLr!@#`pUjz)Az&{vuJ7!2_1#r0*N?cUa(ligA zqQKvH@l-HQWab-dy;H^Di2Oh=IUHFqZWCk9PhW)44MikF^sm$$ZHqt6;0q09y*{-g zvRG4uBLO>!a)yckrsbzR4tm-ADJ$>p=wN(u0uFdaRXCOYL3WHMbXgXsLZQhPyzE<% z>4Yv-XnuU=U)tChk|bt)o<&qz^f6S5%ovB<3$wEu-lSEBi9=QxVjJ-OMeJLv3J1Ro z`4Drlw`oZXy^V%d3(Kr{im+Q;EA2%h5`*dY)$punYAyCSf@h`RE@`@G+N&u6e;Nc%KEc)jYm90T}#HDD!*q@p1QpaO8k0VWXSO>zTO zO1q;#uia+4P__*OqB;|e)iXzfJc2K)pAIXv(blI-iF8QOmT_$dh}}jTY+?-O2Uu{C z4E*o5N7LCufIXE9*t%W(>p^?xwxa+{;Kn*YJ-7fYI|6?pMBi;dqnT8=@DxNriG58Ngu{4f?MC8||pn3Oxk7rw0UjME~}qOeVCo@5}D 
z@{!N^6wnLOyEC0&x}rhpqVw@`ckHE~n~9kw2eMe#S_UC4>7)0> z`z`r zC0Oq$p~HquFAmkIk7Z=Zm?BXynjZoqctm)w@X2J* z5B&?QkLUgmC!bIInUrQ~b>`>$H~XRyLju7L%?%XzI)_CLu$%;^ml)Kl_jkVE05SCQ z7b2e6*JK}aF>44UZm3Wj*FM=pTBGy3$J`-7Ss!gLN9~edi77yZWdcqdalqqQh)~x1 zrh^I8<<14qmF)u7l@D7nuaz=537T}Ei`a`qHRj$IC5vUk6Utalhcf0npAAr44F@^?%2BFFsziELIq;z_&l?^vBCk}p?5)zRXgt0_&BBiUZj5nUetf>^6t-)Ixm`Vm zBycm#lqg2O`gUl>|0Wvm?cXc!3#gBPx(IJIK#PXNspF49Zte?EOWy(Q%Q9f9+*d*Xnxg8706%g@RZo=awsWL9V_%B5QEHhWo|QXRh#pQ( z8~-d6JzdK{IQ2aa1I@BS9cqg~6=wGLlglbja}7zkwYPjYk{H0H>-D0;%1XR~T%=jw zkEfS9!JDuHAW8l9n8hyWt(V<`H{u8YV_EfLox^`GhYtT;8s3f81ET1 z1v5P#nh{EIPR_0M4c_y0PnHO;$8G7jR62Sy7jZPeU~pmtbe?bW)hf`6H=Nvd_9WKq zJP5^_vVl=isqC5q(YKzVHw0>STU)7W?Ubp>yqGBLvOA|Dx?sxoXp?|X&@>~oP|k9O6BhL(`g}l$~W{s)c2c7S=8{0HC^r3R8Tc~KJ!mCH26%l6g7%K#hVgJD{n|7Agq(T6#E*L{kd;L$53)Mq(PUR_7vk1& zNM-?zu4C@g?MT1!Qsz!7E+0RC#yjgbtB1?%ZSg3$T*KrgsI^*1(t7l)Ci`s*-cKnl z5#(m)+(|ZeHn3y-)9?)J7Dbh#dH=$3dMh%Xjcx=(RtyX%uPNfK&Y34{{AcT=8)B0}=NK=SN z*MxtfRTtkwo1CE5{R{~uJ_otX_Bw~&RE|xCo_^O80j*ab8E8827Fd^KL*9sMc%Q+m zsOv`~iAP{J+U5!56!xrntjg__ELn}Fp9RISMp1(L&aY;v^!p$&%9NKuStkeqeU_N5E? z@wH?UQcjeA;CL=!QB|xVE~yQaA7A8D1oIQ&xqNPVSBMXJXgEW57RH^NbU``ErOUi1yRBGtoh< zqZ+NgpP6S(Wt+%xxk^A;Hl}sg8;9*)lo>_b&ffktyDznn$!}q_WfdYN?*JKx&ECkv z6VQ$nXT7+OEI74nrT{ve`NRAYgmH>X!U%B$is(O-o*K1I`+ls?+d$i zrrU&atR20ZJh+K+Md+TR7LzkMbU71%$JBkTp3yp^U?&Z1xFs3Bi1hB8a!JTdeOwR9 z&YS4kV4dV3bQcUIjsL*O`JD~ACr{W|M2EpHOL|)Sa_`JHQ|il3*d6lS)ZD_quB{0y z=sa5rHU?}zblh(2y)NloRaFIL=ZrV_RuxZ5y`U|;TRrt}0?hc6_tv*Vce@S`$HgqZ zJEbLkppSiQu&-*_$pp)R8kFC#v0K2S8a(EqmRz(9sG$^$9tn}_3OXFmnyS5OrJNDg zm#ecL;b7DGX8H4=(%t2dF#q-p-OEt}oU?+@-vsH7+gV{!VVC{sBC4d!zQV0KfUtqR z*ul?de}G zJi*oK?+LK|&ZmmH@eXUCJxVzl|ND$z9JAZ%B4~&nsPf*j{dDPzV(H$+(UG>dG^3?E&?>wv0nJ>T0r1!}6w0Z-9hVUI1wiP< z4Zx&@UqJlW?Kprxn8;brei4<-Uan0@tMT(DZ}`4H7Ib*Q&IN$ipI%_Pz2{KbKvDEX z>I;6&$C7N%RM>G5+e}GVYoY%Aen#LqcC z8BaHhArQTWpVunRyCsp<6<_sBx%6@EVJ9u@fY*)3^T_jwpXSE8rN8mhyfvW)%q;04`-P^Q|DxIV{xbJdcx zYlW=)^dt<1Hp|+6^BP0;`*eg=o)IKX%Wo4F#$w`yik}{QhAiypUs<_P;U;FCv}YNw z-|`!Idov_NRp+w8>gjJtnWV?pBFdBQ#cR9`%Zii7;8X3 zc#C+qunef5kJFH-HPwado6eUfuf#sDhOcIc&(FD9V}f_p&}CV5|G}I9ogR1tO1?T%Cg7m*In=Z;^N}DoV!tL|MQ?%+wGv){I$^eeC1X~ z_k6ZB@XoXwHun8yggpd0Bva1dC~|abvjO~_4R+APEbsu@W`-cBm-H!eDNf_X#k8JF zwk19TF+?|cMt*omVRAA0VG*rmLe@6!wfTsK@END`$w^Sdy%btAu$s~$pO1y@&HKL` zJk3@6*X*hSa?U8^i}OQZkY=SH9Q%P$T}-XHG&;1Q6ktQ0Aja?m&iw}G_MtuQ->kYa zyZO0=klUru5x8i-knnTILfNV2@gVk~t3UgUkYbWbkx05Yez-gBGs6IClUAWh^MPqT zQ_4^Z8@S?E^p9-hY+J1oESrb`Y_MZy|LFRntZQygr3zb=I6;Lb&=nJK|4zB{mR1rq z9CjqaTOtgdoJMaz7!f)ib6II--I&s{7xnPYHz-8t8L{Ijo9SlVe!`HH%%9`=B6XGo zxFv=1Um5?3N0-FdcmEW>UX?2G#I^)#qVF7zwtSKdA=+;WzqRzasb@#_YV)t+W@@{0m7xo;o{e^3;2s(`p z1i+Si7X*ItJtWG%<$VAeGTo=E>VT(P02v%FDn8{q{{BJX2P~=lPXRo940kd7)mU5+ z0g_D&;J(WtfILpK3XQ%gH__Z_Pr?$<~xV1aKBBg*Q+gCv2hjcmTiuKS5Nj6A+^{gY8a{+2fbLB6k9UV9SfF=(H9^8^?83G^B{Un{6qNR zqy=xLF}wSgz4Es%qEVz;9+~)Od@*yR7;H@Opm|G7Yd4u${wcx*yLhV_Y?oexJ)6k~ zL0hcv5y_kslk`PXdZtm$s-qti?h!@cU|7EJRlrJf5f+MQNIF&2R#4}ONvn=7)5De# zOU@PtF)mP))C+&Cm3pJrU&=<8%+=ZuAHvyE6sw*Pqz0SN^+WA<1|c*`l>cBk)BO2w zEM=F4C$x*DV991tglSQEnEHNne_zxB1TnN3wus>GiH858qcj+`WbK@scA}73+n)2I z-}2et%KmmAtuu>NDaEEGr~Jpp7Q>7Xt>TRjU8rJ%p&lj07+pT|Q6vc60S1GH~$GI>bL`J&wil_gU`qs^aVgtG7-bG_Hhg> z!ake`qtYAu95>iPl!)?z`doRAj47Mo`66;#c5D)K1A9tWFoKc>03c(fgTbf`LdEWE z1>edzAQQ++x@@f;K{iq2pGj00i-xiAVUp~o1J#|12|gNopQ+7ZR3r!6Y@Et5p=&Z^ zFdqLwx}h;kpY_FRn4h1Y4tQD!@Vf^4*_ORb&^BB_kR_A3GEE!7E{}Aj5~jXK*&a=g zdcM*j*HEjuCC}#%7mh3bhmK!}-UShbgT*OMkmDi80sI0L|GW| z%7Kkjj&gXI%~H0j&{Py-Mu#9+bXa-lfibZDQ7>`FhjqWVNnTg`A-V8RB}YfOT?p0> z{ndKfVGTCIr__F~N-j}+l!Inmj%Q!KY+CzbdT$C4s<1((EIe?ey>mW#!8&~^g-sO! 
zC9Zddm08Fdsp(X98fTjs5&@Xte*rr0yI(u!R$3-V!*Z!VO&k^OR>T0 z^37d3YNvk`QImYvovNEXG$ArG4f8P76!0FI0Xj#3zq9NE_Q)yLzZ)F|She2BWB&#E zUP#l$vjsptU-xytn{A+WT6>20r_$n$8UOTjJOP(^9~m%9OE3p3j9xO$|st|^&B35P<4*`D2FUWpu8PwuAPr#ui?z|Z1@2qDq zXaoLhXG9QdH-FH=;L4BGxJdSa%Hh8Cc~#Y;q>C#EK>@BY@pq6(xf9|v?GLqWnS6;6 zeDX{^6DrkkS$gX5w+m{ib3&m}R~g6zFbnL1C5~xE`j??lmj9AhwbAfSJd8GZj=Ewtx*~5{ zy&#YAAOIK@6Rq+}CW*C@_5JKIK6`COrUpwYi9A~w8>4jp$g~S%g!;H;jPrR|54TRLWDXiMQ>^Ros}H5a zs{-Ms81#~bIK_^ezxUr~6Wlge$;-+t@wlJ$ck=@NFR>9wb+jDOnw~i=aM#^0_Wg?a zO3WM+1L-OCJjt#ri!*@O|Mt&u8USVTz{oZu-$Ug;Q~%XE7|J-+xea^t(g zsIeS0{wpyD5>xo7Vg=Oig#n|~&EmQg1r&FsVwMqV1w0%D5|zllK*fRuh2u z0Ov7@IX7uT<9x-21eszqlPkLYdg+kN8pJU@%D-*-8=+Xl`L7sgR5v;Sv|8;C#(sE~ z!g9RHAzs=*HMt>Bs@Tl04(2?-y}bWa0<{eWKQ!5nilslase8@graFJ zP(MGNfi=_^4c^#ATa;M8PsU;ED1^-pUdZh*wBz9$60Lk$Y8oTB5N!Aa)7%k^Av=;y zxr1>w#;JrTA$Co9YYNf*CL{#3Wml*R zt%z0VfQv_M87pRpw`oJ?VyQ^M00>o3|er^ zF3hUw>&w$-QD!S8)n?b`D+|T$r3~0&de0D|h@*cBL#nnDQ`bD+T8k##ZciludP@lqgROl7uJ0A5Q`0mRV3D#|Jz?E*uhfbR z^H4t=r{d}Wf5xXM8BEUT1x+=QuoZN_?arr&A3i%f3$pD7#_s^v+Uo=`YS@``nj`gT zx~pEJ4ga#Uw7g;5)U%FgkwWdst*WXbcEb*#4|3PE;ILvwm!V*!VJ4FzYX{17!}9&( z#Fm<+PR$qLq1ly>TQM^bvR)CMq5BkB=T)V1d(omD7@N$w9e%U9fcIBD?X6+!MZRRT zry*ct%mTixfwD3hc++uklO?nT&!`q1Qv3U*^z`)!fRjNK&{ni_0))E+jMD8t03pWl zKPG?c0PvG?IwgDDczv@1ZY+mE zSTM1BI#h%|GSdMiCw12w!uOZRUf zIE0Q(qqF_$5F=<7KOrcOI@Q`-;tdB|7tf~BE6J7^O(@sqRi6ltMz7zBEMr+;!%u){ zVi_W{J@Fo*N#k7eTy|W4egNd72FPnOiNy1VLs88o4->zUL^!EK`8!uJCt}#=l3%qb z&?0*gVz4A66v+H*f4||a_cF%C8!Q2~e&qbTmR^>=je|oWc--9i#+um+0J925x7UDM zkvoeft(*=V+wwS~f5ElxhM0m1P&+;;y(o^HME9A-v?tX{9&}w?DB`J=rxXbHpxp;T zflr?~D7X%gux=l_bhJvff4G2(sc=;%?F-RXzk%Kvm3nkH0)wMAY z@_sO3<%&r&8f~OF0wp6bjb~{3>U%zvrtnR`yYF98qqj~!kJ*buuMi>PEd^2}1!_G< z5(u_)<~_}5 zV+}@tSBpX+(O05r&6^ZfP{sHM#ImIjN=#fdZ$>dG`R(O>RuTlu)rq6yV@GdbwI8Qj zW7hWtEX)eEEA_Yrr8Q1VmI7#-sn7U3PG`VR2UL?E!4=ytt;$LI9^M2j%{X~@>cEUS zt}RCZnUH3I-$lG!5o3DQJ#pEuB2C&U<-j5}qy-O%8%I50@?Pb@B!lx;S@<&v$J3** zHZzN{luqXP+kXQtq9dUWNIUFF|jhFg`poQy*QXk0M#DU@0jzq@L2h~PUK+Xw<3gSOIZ%)SsLedmwGTHwKL z{a#v{S5uR4fBy-fol%qm6FoOiXrr9*?|}kL2JAJ8syI=4(gUzcC>b{%gsSlVJh+tc zA#z8SZOo|bWm z0Y!N`!5~o-DB1dVwj6TM&U2!@2Z8H=aZ=Jg!}Up7q%=7y(s$j|wYnHh<_)=bwOSeU zMj6e0V$zreN3vW|Q(#{n-u5w+wFhV3eK8HiE-Vxs_G`prMev0*xK1tG+uK>l>_iwU zkwojbscA4&r!%jVBuEGCVbIyNb4#~;^)2!!LvRG!-Qg*$yrXVp2EXB`(hu$aVh#S2 zg&&A(8TPtVHN4P-uZJsH{HPDl>eK~v_(C8L(?M*VPEFNdgf6$V;mY2;k#IuV^J~io zP^P0HkdglXhW%Ndn`gS%tN&n{|2_mGzx|QpcfGiE()sf21~>$zf~ju}XzhAjR? 
zNot@6ng%N2(I*5c;L7E^_8OIPaS%in&)Tiy&dTk?bf3A2uBGMVoFWu+peneUB>=zh`T6My% z)9cd|$7oL&$TX9wrugN)$<}Z#%8)TVtvk15HW%{DkSkjma=^4hHiImYKr|i2R=D=OC!@cJlq(p znP)LHs1^?>FUb)YUu(sDXBo-YVb+?K6E85C^Y9W^y_-bV&yG^oIvRrW8kQrTqgXi@ z-1W&o4+h6q>4A-(IRks0`km1nbbj@Ei4OCd42G?4$MqXqd;0=#Oao0OeG(D2!J*vW z+XH``uY%8!Cr$8;er;%+@36ue?8}MNxl*VQ!;_C*&R}l4b*4fiw^dZ6BYtxI1wh%VU01zQQ4Em7y{{0Jz zZT#szfeEw>Zgn^Y%-CONF_b`A1`M@+>Zp{oa^LtUeRPEa11b%p0gcz)DX{@jW#Myw z4;zZxO#E11ez;)xQ^1q?7j0>wES|=*S>Ts87p!s)KQ}T4vs$c}BoO=JYsm_h4ZoSm zKv`DG9))0_zrXpM_&hk`Q085!$>e8ZXzs~L&{TSJB zD?H56B(}FG$CBUQe4`QJ7;@JLS*GSsNG3!bqt#yUr0e2Yv<_axtVnSWOp_~G0aN#S zl}k}(!T0;WpY_ha9sIL%{gb$`R;3g0edl7iT`1YcV^1ilQ=97XV1YaJxRV*KsZV-{ z4@Zh#HjR8&w5oG&0p~){!(UnXlb;aUFch2;ZJ0K5u&p|?cg5-OmK3HmAw5ptyCTGI z&KT*s_w^z6bLeJ$#6f(aY$v=8j#)(+*sR_(NFLno15>iL$@~}%LvGcqLQYxj_eP8x zv%InDc6KIVFhx>Bc0_Vx8v(zGv$Ex|Y~!aT^b)&nFv2%N)s;=FVXn>ntY2?6W(@YN zw?SeLYuA(Ws0NXHy;3j$-BsO3|K-yl5c) zX|TlcnOHD3WE5a^7Un)1eKB60afW2(^ z%cd-Iwo={Id{VlA_j#)_smEVs$lmT>ETBSJ<2yNa%)?1x90&}0+<##}I=Fw3k1@Ne zwXw`2$x06rhmj)ROQ2LEkH4mKpVo?*H&N@kNlD{|+y2cX?5xwW(!FFCpQwI9C|C z9rZ_FvI?xI2^~4hkgwKOMBe-F_cwX%gmb#9UUf}}XZTT(Yls1wWcpYBWj|-iVFHIv zPu&$DI439I$UMDp=2l3x&b_7!B~Jm27tuxyH^VgBnPSJODD-2)-=^?Bci|_G2fldW zfO@%Y>-61HQlE$^?Je9$u*?=t^zVI6{~>d*b55~w$%S_4fx-!f7RSHsP=1@%<3fyr zQuw7bFdoK4_{3dNb$L?6`1F&5`c~_Zo!&dZBqWf=L%MNqeX^+2ov%8{>u;1nj7wcw zaeB(g#38V~Zk$gi%dB68U`_LY&fslmY)m$cR$5v~v%W9@;OCM0{pEK~n_8E6$PmwX zAO;e6GXjU<22^z+Xzw>pA(jxvt+}qqd*s$<_2}$uq!0Vgk@c}D+CrK z*Cr|w6pGQ%n4+WD6xqidzrS?~v|I+yJR4xx7As{haYGuS|0Hk!l@GA;5>U9PE4tl< z_H@RMIodll?#Sy*tIC%zD}BX4Q!V%+Fp|bXH2_7UQpkxP4D_-=fw7JQa>^o8EQuO4 zbQsJ$A4+T9!9GK+;)x{=S(wd-YD9JT$FMNh<9-yaxOoPX$Wap@10Bx_B(v~Oe){Fz~||s?sVZ?rBI~ygh(Zk_;ziY#TNElpnAII+r%!>i>-)mKaS2SEURvd!bpR3Nq2`dBHi5}jr^o^ zcO%{1-6fsUozmS%ryxkzS)4nsc>MU-YtJ?281JwJb!kp6E`~BeBxMedB!R_H6oaI7 zOdK(r$?;A?tx&?$qAKCK>WLo9x%}ERw(^^oRM17S9Ios+ zx{zRQkw5D!ZBqY(aW|*=%KbR&*EfRRbm#Hq({SyK8>D|WBXMIpF|TviR~xCmQ=k{# zIdcL1zEdDmH@WTg#ubN2>sKf`1ucvK7{&Nnl_2Ow8aNyeh2dt`)_zqmchp@;x@?MX z{ykV7v|%@>kXP_^SqrQ^+o6u^cv;$s{z;tMTtBAy)8niGxa#C}-o}5R&pXC|*V1V% zXukNj=I$s}qJo78GjqfpN1Bz|%80nJ`ujchljJ0)a(kH#gv^YROqtA2CF`nS@PJLP zt*vqTJl(kiYq?kM+nx4Oot0C{do1;9uyw@6Byr)VJGs+C*CE#rh!j`e12~(v)eeLq z>AFn8@Bf8p%3K^g{vrK{p@3-~Bm{|KT<>Lk>wQr~TKA4Qh71@Zg7bCy?JVdPIDhke z?zQ5NODI}o*xE88{rjsWqirhX+cDSbZ!1pX`K%JOYYLW8c_(y7o34y`NutT6zecq) zcsvm>4$jVu2wus%kNTX`bIUj1GTMKI?`PKMPvm)XSS1_HbIALG=stFE+ObL}(|mk) z3|hVj)TcL$z>B#Mkad1-G+Hh$Q03OOwp{g*Vvs<;0}ID>^5dx1?!#l(At$F>rpbe= z7XN%>4#RIm7K~xlVBcNs$rI-_n3{^Yeep6urY=UxpY7SG%uv_QM+D zig96e-S?7ouTK!*)al!?$lF%esg}=7>SP$OB#^dbo2dI82Q{iPZG^wigvGc_ z7_L7tRQ`6*Lhd}L2>fp_5--!|;YbijV0s0TF<)E2%a0G~LczGVuL(w+;xT+@_3=`+ zO-3L{3Nr~$4n)PkW&3R*yfP(*2EF#Tr0ZheD8}E~NDLne>eD}#nq`X+B3uxPtp4bg zW$5Z^lBePn0S}#b!0P?rluv62vI48zeXe zcl-OMAiNVJXgk5MF+auKT>7oOq3Qf0W$}hdf9gYRIQ~j~<&QFZP4ow+cLM1Pt+0r) z>W65FfL1O7->3WBlOYXtsM2PnpT8F#0=FbZ4nPz`#ZGm9d1~oJGaz zHB50UuBVJ>WWn=12m^AKRXE_oJ!zirUqdhpFYi#a)la?~Qc}Bio7;{n3KjlnG~Ho_ zlHz-BoP9UTv?~LXuA(*AWFwDN7?w$mSO)uW3cE1IC<_&_=<%;&viU=_yH}R$)OPG& zqW^5ejIKbQaY>X=LfzhEmv=ZP{Sjyd6UuRol3 zCNj~dt46*qyzj$L3Y8S9tDkvu*f2);hQ?F(cQea!YdWuZpc{!>*8!@ z^jhkQ82(aWvs>x&K22z-NBO~@+SPZteSQ= z5^yyCPMJQ0nTX{ix52;afOxNN(mVZg%!cG#JxX&T+-kqMS?;9NbcQu`4#7BVeNxcC z+%vQ7f74f%*Y+{CTe(Fn#SA`JT<gU8 z-XjN9^jYZp1dT#wi;W}y%Zt!rrS{_TPp6|94FFxZy)8Tun8w8s+2XD(mjYRD5d87F zuF&6`*nFa1IFX2D-bcuXD5ChWZ~F0W|I_Eg+yL;~{SCbHfuy7vaY@wcDxG4pO{;aQ zHf^A)CVJ-roHmFJ)J~#eGc^@h3_0G(U{MwT-0}9);vZ@kUml@i?b+_)6n$$+kOHn5 zG#utXb@qUqm%Oj9;2SxAE*e<*fJGZv1sBo*=`~~Vht`#n&T8u(LF-eP&PvOv13%P% 
zUEVj|ZWCkGq3^r?M!_NV35lZ@6`pji^|rf=;{*E)KENo`VIlI}?@v*uq2UH|3Iy*Zn(B{J{iLtY zHOt&eFGak&$)wH-du3&%^#M?7c^eiq0D3}Zr@}-FH&{6@-Tk<%gd0>SwA_E{DvR$_ zwvQ@Ao8#d8JFEZn{j6y<8kvB{?Pi(#9xV`oq{9E8Rc^yk5llvE)8T^P5hn|`qxeA4 zdNa42+3(pgzzg>96Pyx^R$j<|ZAtF~L4Gl?PN+~^{ z+TqO@j~@q?ebAxq*>^mDhT7OzsWZQ^6MeoWhx1JGxHROGGlyl$mWDcj%Y+kwy)qz2 z-3RQ~A37<%wgPEy<9h5n@O1zi=2uNxEc$=~ZFEW7-3)ah9gT06AdMUAmF+egWEBVBoo(P zry5DDJT%kvNvq3^tXQiUEUm2~Ca$wiy}Ns`#yQ$Z9QnrCU5tHfdA|bwzy~q+TnN+b z>wCx$LND&dC`_enYAUwWIC3#WzZpPo-rTG@mX?-}`z}mXKr7Ypy$GlfNTMC2Yzzr#!#4U}l$U`6 zt3B-Nq3TtG-0h+Q?c-;@94IV>HNf%z>??66xQ>8T@dgi4M-!jU=3>^1r^f9716K2O zQT@YI;*LNB=3Eq0%?D=@HxQsQL2NmW{gT6s#v- zJ$!6!&n2!jZ6}hakg2xcVnRL6>(6gKjm@t0#{tiGyAf?jLOO#RR?Ij9d;nby{;vE#I3`6&(Ai-xb)&qE1|M z7Wx!vxf#jPeBvkYARK6JZ$fEzLa5z$c7%HaYZ^e@Os1mYt8Q)}N#>oX$qSMA77UIUFP?^5boy_EAQP zfIC>BPz9P?Lb5W&J$SQCmaa&sS85LchwKnAIsF2bm?kasaXm2K`QCwCqql|gxbX~= z9#N0aka6UlpM@OB(sPn*$Y^FG5y0KkFDN!P022(S!p(BdcJxvcz0yYl@O-abAg<{!Dw__ ztTmA$4cC!cG@xZ&J)e8QZMY$*26KO96ePs8TXFyJccfZh%r*r!OGHLm)Vlxs0r5e~ zfmhi4i3UYs|2(1-zC+`w<8qMKAIBU~*Bdr2otWwQz9)^ZhCNS!$RXh;XSVx zds7=RWWp(|1RIAXnK^{2sS7-JhABFfv2X(I0L;h(%QTfkBVzu{B;2MDJMbB zXtqxf=|7Cpt&TkT?;zo|N5b58S~t7#As0A7Ufp_*N^*RzmB{}J=iu=?CvpmJj&9y4 zaHd@9@5Y93-;Ii~8h43C%UX%)E00PmUwVPeYmX6l)$X|390&ZDq`0%z)ChM?=Uicl z+mbA9EY-lq!Z9pmWCn%lkM;Fe1&mm4D1mdh0DU|N@=nF8=nFOlCL)!MYe)W=yaX(n z*dg0i;vYU2-cwOZg4vjpPxwPw9&CAEY_`}SL9E+adsoFAr{gbM*W);|+jHD1nUWB! z3~7e;{q&?y-!73$UI%kwM|0pXx0r-ngT)ehkRfxE^^|6g=!sv03B@|wbI`vrLC#KS zeQjC;jH4DY(bkVq^u{udrp&w4%Z>|ta zJJ2QMbp>|DPhYSX>tUE|5@0+puYlQw{{(Q`x_ofMtv41>j%;x6w`V3O9z{;@O}O_6 zs#u($X$w(=&5$@*s)AUiN4u{@WCr|{0?fHvdW)54V>yv#@^Xt5SgMwPZ(k{x+;q4O zJtY()bVlK}Ve9jq8{RzblKo?dK?~wI-u}mwuFPq^;dM_{)$tI*=y6QK`X%#YN>fq& z+EX`XpbL|Cwy-3fP(1aw!fiRKcLs+BE9lFa0cVtA%kz_kT7z&BFf?MpaO7f%h;vc{ zh00O9f0i`azp1vnf<{s1YM!^sq_jj}B8?(>2(S-HJ`Y#jfK2J<{U?t4<;JbVq|?VY zYT7o)a&Su&D_!%~$IoB&UNoFL@n_Pon!#KIGAEx2j9x5Dw@T^uECZ^NgpbXHe8;lTrLpXQu#=?f%bU>U0~xwyGZ@ z-aS6<<}kOi5~C9b<~DwQ{@SYA6BR}r;Gv6{Cl0lwN=V;`GQg8M7DJ}k9{{^QqMuc0 zdt3*s%VjLOi(}@M)jqrmHENFazF;az1qn7KBgtNX+3(@$IdX6Xf$h7zzWz4t?>q?fbr0{Anp^Ge)=%z( z@*)a=m2%tj#1Pa6pX)JcLgDR`gA|?P$NL`CC7U6l+R>NK>odJz2@5`JL3MR?Z^{aA zhXK|bDd@w!kzhEcr1z2@Hv@01;YQ&W;ME4F+PTMGl7>&ccF+Eqn0{0H-|~?{_9Rvb zIYpVWLx%xujVFWWhwG!LaHVWeY*g_A;x3WWunyM{xP=t=A5kX;Q?G&QyzUP*#*@GlkwBF3eop^i)dFJ+GYS_PdO#nxPiZP{o9ZdvEzSTf?_ zFgc{~HI*D+rO7Ced2%}kP5WA@7mQ4xjI4SHz;J9?motmEzrMMJ`s-b5`=tJu+}$B=Qfp}VGCRVMl@+A49|0U# zd6(Pv44bBd3#Ru^a^&cfiduVyiI;QXv2;d2<Qg3fnhdHA+#cJ=92GXehjtDg_cpEz4g4Nl|)Qp6pn<`4bmc9o>zEn81l#GN2P ziRz;v;VoZw@aZW4qVgQk7nsll~oghp%uoENLc>v-ftt0lg&i?2k zce~qP{ypIHehK*ydJCPXvLYsCX5wdUAeBK1-CTHRb{@0hH zAp#fB(&^O|F(9H)6Gza-4%7t(r6=||el zT;nDKR0c`3k+MckQ65(mQxDcVo_}PBwTux06doEvWn#Ke@pY6(!viP}X;bl!hyCcz zj`&>VTL!cBA9(xp^v%ybzJgIt_%hF~-PegZFMV`AcgFjJ#@{j_0qOMx5R2r9!KJ)rnR-s(| z+A={3V_F%~eSSy@@~IPwxyc0|s-&B79SX1}b4UBnyffI%7ysc{xuJ5*g{x#ik*#kj zF2r*w1tU(~#C3WRx6taIG0Okg2V$mCZ`=#XVDx}Z;p+q)?p!&G3+K-dryjN1DZiJ* zI%7=mK|a>@q)fG>se>Yy@86VY2go1{EA+OV>w4L7n za_;1JZnrs*>)2$S0DijBg!7^`xw1?%zkGQS7S!l4JZtZ_6zkj}DEVlqp)%wIhvy2U zDIOsLd!r;Z6cKn4qW41-NQ~@J7~g4Gj?Nfn7Ej2Ev=rjfO?ciH=n|xfGK7^FhQli! 
zl-V|z6Pln)d@M^O;!Vy?eyNI`Kqg*+n{{DnAcap(7&jG86^rY3asaE7((js)0`${7 z`Egbglqv%BNK??M62D;7RsJG#eF@F$k@}OwC6ob=PEum+hxbq@9#<*Zd|D&wZLzV> zO;#DOR$_3ixuupB!z|* zStY)FgcWG0#~JwU*Yg^5dsppG#}y!?_> z6GJm7LM=yk{u2_*s~&D`TnA_F(cCuqXyb`M^iHgF3GlyAMKw}L6{$EmH>&ESekmiUH9ho@f{L;lE{Mp!O;}8bhT--lX!>nOwTa-%w-bJq=3=*~v%+NQoa>=u(Mg-Y(!f4dZ>wWPJ@L zuvvPsR^9+~1G_cAD>c)=g!0uqp-FITW~@+Y+kPZ_gS(h^{vA9~{S*cz*O&<376ttT znFjP)iul%n+qyuw>Q(_7B+-0Y5leY*@2bn}-4gS3DLQi-o2r__5*0cSRI?|9TXben zaT^J*>gpm{bgpH5KV(mjKI_Co#LH1hhZB*cO}gle@ZAr{M?2hN%4&I)GSo{^Wd_o; zsEH~7a%P@hVq9NVHSWv@OXgCh)>`mjyS~28ywdJ?@BqRAuK!O2eZXMRN~kL1hV8R2 zt*kyMuc+Dv{?Ba?wBYOlu{GSbdG$=E8R=@qbKT9lGCyfQkie);snTginY>uAo$L3gH^_HXu&K6lGMAIs5IZ;hXcT)x!yqt>oQLVYZAW6iaFMOhtJ?> z1yob){eVDTz?c27+616lfZhe_^dvPttHzm)WSA22fj@`4PW@$D=YM<&BmbqF4s@wc zq7qQ}#Ip=hUPW84((F0hQ19gYpNR!b>70RR*yeM_Umsbv@}-c6ZH6G&Cz5kHgRK)3 zN`8VcQOP*SlJ2iUj)F$LYu=IF?6D44YpX!5jonq7);g+kFNdk%hyOC9xQJ}+0S}ys z)oK1@FKU>uy`x}28#mbqGw_`@Djy*Mmy_MCp6}y+@lFvwxgV_Y9AeQK`_T|J-oDLv zxfnG)X;8>FST|EYTUv55*2JJZMl^I8IH@y30y1-oZk%WC)|>`f!wQV1(VZzp)dflu ze-zofujs;J#Z6L_i)b)~U-wh>cd^SDgrbC!o;~e-U z(3Fg{3X4d;;TI;!b`+V9q%IkdIOB=SB*&JczW>m7DGW`!J&wSb*)3v*85G8h>}9@{ zhB$eNz=$f3ARHImU}z;e{$$OlZd--g0?+A2Y@Wv=ii#jxP|&5^odC_RBx70$Kbr>ee)=^sOsvBMfz*>MhvgtTVq)Mp`R3s{~F`#^W4pl zb(2Pti<}wZlQkV}hw_=Pi>?fdPD#qk7-gWF>wq;m^P<5!sF=Z4tgp7H&+8}BbR?4Q zO2+{rH09Or3leDxyj0>sD*5WzdV7c9Wi;a?^;QbY26F{WNS}NP^+_%1*~ls7g(6H; zD>&#B;}CMWYNX+)qb&_#a9qrc#JEh2NfZ62AeHBPG9w41x+spaExs1#yjp?Z=v=XsVW%;o>kgQcH1AC#uS#!`)UtNtlRtKnH)lRza z$;`~mAhxRp#AaK%y1Lln=!@tXjQUt&Rn+~&8o$r-rkQ9Z4V$W}&c0CSI7ECp_^a6| zS@K0vuW>=5!p+rYyjYp(85mA)Csk<4$qQrQ@k*H9bX?Q%^dJ+hw>{i(MfD}fV4(+- zz1aJ?C5Erd{8#*5J;JXEqExdT*^``z!57{cIh86{DQ8D~JK zK5MehX<$XSsC1?>Qah`tpa=x3GYlfRuZlv7)Cpl)pW~IJzfjXF3rTxW;@Y$C4vvR+JhSx4&(@}nkimKcG74Wip+89t5cQ3^1|mLmt%}{K*UQ6;A`OvU z@(CHNqI8}Zq{p+U1;EzLc;sSWq4h_r95n|IA>%2g;}b2iC9^+5vR>e!qngG&5sjY5 zL@d*VSk3E<$&W#M@9m)NWkdIO|CL5S3^dOd&7qya-HWNzto=?E@j-6Ibf)Rm-Y=dl zV*{f+O8H@ctDa^n7FDref5~66(pc3%CL`mp*D&ZsFQ%A+)efZ1 zLr)gMk0(V8P3(1$OPp5ZA=xVf3MVFvP=pnY>H2P8 z^>YK0Jv+6{A^I}ntOAI>hms>f7emSx!>>W)apE13Hukg4-i1L$8GhuTs5Qv($;-*9vfggjv?`JPBU9T0K}NRT+M>KbPRZwt0#1#3Wre$%otV~_Sc>SD z2Vk!nfgz7fNzjhhuaBNpyhj2%NCM#f{>j|=>tWuqJ2n^VgU{8rBjeCbm&XL4Z!2cM zL8g4nuPGA~b=to{bn!`GAeQx~AX5wYrs#@)d8m4LC1`ss6f-hX@v}X!t=O|k7+976 zwEnN)4N+BP;MU~6A}PtyQ6uo*2D>$ZohbIB9y`noW+?oUOdk*+I6lKL6f&iT*{^1)$2kjliuM`+~pWD{nY?jmjYlG;-6< zmgnEr4YpAMNru8$DLj3WFFsWRXrrYLKk<*P!%25w{JdmU8#s`>@cQ$V$NeBQ<57Gf zDClhiM9NGfZ5yKZI4UAI*@Q@7mcfrT(6{aca|nfBMnyC0(wpqG`d|;v!_&o?t_`T2 zt?fvYz4&^1@&cmBDA}sq%FTz2x}6o%35^!>a!}6z1#`f#;iy$gt{ViRLj>!+{v-V^edc ztNMTe&$KhCjj>AoIFGC8$-MJ$8YXC%RglqJuZN~ZBpj^8N>u!-n%&D zZA)vqZRA!z)&<~uohDe?(p3Y(#y7`5f(?Gbo75r6E4ywVQf4iTSIGCgWJKvsXs*q{ z$r(4cXQtDh`W8?40P$2JWFjdU@;potBL@TQfCW9eYp|uAKEMk7KQ3ZUuY+bV+ERV6r=0Mu;7=9&uJGVh?m9M z{zVn_mTZhc+nX%lqz~^#!)Ch_7E<}n66q#rF?=#V$BhtN0X_mS{fK4=#^BCgzmP2G+8g%l0L-s`G zPXxbkm={)_3I13)GwK|7RB1u1k|iV)X`&gDmpQtWR{=Z9yO#BZqps z<_ta=B%~+?52&OXu2WXQNaKnN4IR!Em#UPb7;9{ZtDDA$BvZ!6f|chSrya^IG)#}+ z);1zThBKEAJf^T-PF2Z`AT}UV)E1Jp{(%j+d zd)flvvAxb+9WEMj!h`dHJYt$$aSiZa{7T0i7C!6$1{JFkxbbhBX_DtH`c9V(4Q9<& ztL2Hsr^)%p*H?i9AROLBfF%e~Vt8#iZN~-^Pg>zF<5=$%UtuEl*X1bb`}QnUxf>wO z)TG;-WtA>&pyJBk03DML_&P`LKXR!R=*9-RG$ zRxByW9bD>Q^N~5CE+Eb`z+d2|kj#K$M+=sUVZi_$X3mH5=i9s5! 
z$bV8nYKZb;QARr+FS*+{&{BwojKG4JHeVO-FAwpQ8$>HXHB;Isk>srjIZhu^;YT~D zEvnBx?l7fbt|F274uzwXR_s00EXQ#A7N)&@AMNaIW zGb0TAc9!-g>iplwi(&0xCxHmPC>qGevG1~J0bs>-VF0?=aTxC6!uCL@C|3Pjz5_dI~&ll8s1MHzj;I4^pF8is*H#?yBbcyXfe02`9}X zC3*EeZGYO=kAaYRo5{rDGGU}Xm1^{4g#jkvGPnN`;>FO@{Zc6lj23YSmbbJ#siin} zu$KR<9?5=)pWU=!p?^QBA@JBHxOwM+1|9g-k4_G6^V83_?`adh<+mnthm(KxI4Yt5R+%XSp`U3w1r7Ow zt*y?D&dz#iA}sFSwmyco(FNoSqj4cy3ah#-=d7$O0&$AC(LKAzD>0s8VJw+A`Bg00y+hX8a1(j? zXmko~Kq;mKu0eZd|1Ub`R&OeP^Cx}fU_isH*BwGcgBC72(I&Mcge&^;`SAnXZ$g?s zf55m;lQK4~q^~G1kGyQvl<%fNi*51{w#-@O$5J>IpQvL$)nEJZ=xtfo{BMuBqIb{*&NVGj9* z=)P;ZzC0z`+{9i`J-@oj0H;NZrP*llrHGUZ{14XFbl>41BO}j!n&09^362d_B8$2c zc~4Z*$~QUGWSxlBQWQI}VT03Nn;Jsw9SXjG|4)yQBKiihEQQ?~KpKhnDe_lK!7CwD zlJ1WG^JgP@+(I15$1&WUw@~Fuy*0mf5tUeT5wo2*n5^m?z2#kJxY7(H*)&_!6pqAJ zfgUPLY;X`ARi1k4sN^v|5|Z5L9$cMn2{mPJ^kvuiO0CB`H;XoB)9>iFg0;kOMYIePuV&QTwd0F}O1$pO*W9o~Lk}}sO zJocuX*D{Kjul6VM&5_NZTX6K6RU}t0=e~Nv=x>H?-K+wyvcCSLW%a~P8BKQNNgQ&| zCLqb)&N)tYKD=%FKfGme-NBmiwMqJSlE&Axdq5D|<9$x6zpV{~c+*0v&$ z#er=DvKjbpc-(GLl#QER~x9~U?gmEeV%Oc7X)$6KJleD6Iw!c(OEKW zaILnueVMjqZ0T9BYH*x(Fq-+X-QDx2MkY1iRL0-jQ*gL8HoE78mmnvq6Zs?-xn)| zO-3H%-x~?}i-j_%c(87yc)-80%*oS;r{2TL$fI6!x4E$~>h$EqbMjVNcKha*oHA9! zATgn}b$2%Yd)jQX$@#jg$?Nlt!FJ1E&1Brb%EBACc~_cAcdP*u1Q3xEw~RV*e+wCU zdX7D{*^eymvK9c5UH7A;o*vP%lPOx3w#s(G9ln;h@9v`j10wO&KJ)G$&-1;qgRi;G zy9HKuP~u9zQ1?3+Uj-AP(zHeKlY-uekFXFXXwWV*bn3`Y8xsZ^iyFIiu=gVU4P`q& z55HH}PHuRT=R-h6A&NAx!J3Dl_%--rDbOc{lfd@9*?GyGHP@w+2qk-g3$7MRX?kpk zfgSk}UjFA5XSl|HAgn`;Im?m8VgQ-Yw4A&S7RvJ#i+@2Q@cv5(NkJEuVH$>LzaQJO zFXzuTVuzRP1M8;rsT~!ClmrKvpV|`lu^$sl8E`sUeVXSqFM1^=QC}uwwyS#l|8)4% z%GZ%s{^9;Hr>xG8JD;<-XPR@@VV_k2dJZP+io|sPC|~m<;7WCFvP4#Yn^CZ(&-ydT zQ??;Nqp5eUd7|>&=lxT#yxTw<-SS%t4ZCf}QE~_*wxC9_!L~TrOs!Uj6Wu|MuK@5Z z_bU{=VLBA~FGz7Eer||OO;7iL@{%Y=8_;p&s335^YpKZnJ8*0$hGg@ln!5?yn`3-| z*=OR-(ufDx^>(rXIW!sL>J0%ES$l(;jU?~C=gyS1vi|qczS97g4P?3z7mw39E!{x% z0%(s#wnkb>HB!u%n|Dr$-l+jrzQ5q49PGlJLS6NxEl8Ahb~aJzxQ+tQ6Zxv<{^39E zpX!=53AHYIc@~|kmE$B^i)(ccbciP%y3bddp1Gzy=<%9cFf+n^iPbF2;0C)4aFc)} z6F(3|jAbW%{2({hPMPA;YQb31G04`2&%Qk0$aPi#lkuhU9BfW$oLyWxZ?AN37*u3> zg?^_(s+v(pk@Vf`KO>TC`U=h@1Jg*mNG-;+yqC-%E8eIy{$uDJ@3)CeJi)8I#e|<4 zqp53O6T<@yG|lnZ0tp-JF<95I?R>yEJ^4{j%@~ z(I!er`M0_?G(JwiI1DB883-5>j*gC+-iBf9p3P1Q0Skn_cLJHdFZXhUg6SI*D1@qp z2=MFVM~Mgq9I-cyvSD~^661MgaL9MZw`w0D0}xs&W;M$l-f^1#yy>2U?DyNj(Y&K4 z4;LE2&6AxKVhC_42)6XS((wTh{Mj_rDL;-VZj};aDwVXtPI>m|1{n^}>EiTw=K9p= zEbC}$$U+GUb&C$Xi;cw?;ls zXUc2njoOfCB@pe`KYUmM^x4WBuc{xU<^ECzyeR$zk{2^&9OBKtB#hx}#QxE{mF* zKlvofV{YEUFt-{17J{D~cBKF)4(De18%e{$8W7VnLE4cZ z>47+1`_q_S5&@K1f)UXQT{hutL5u*8JCHbZ`1sNV`I>s1XN9p=ltVi_^ls;z&%6^J zWFCSA6nE&L6loLlfDE3OR~B@A;&HJ?iG~3OE%Lx3x8(7J>*3*Hc78k*%O#_I=M%^n z5F0$GZB=%p*@c1*qY0@}3`0B9Yi#Jfztn0~&YvOcS~Tn8%XQ!sHEQ!R9`jayunV}o zU7OD9u9L*@QRax``U!El*o5u3(YlV;sOFX9zi)00&;}X{>BUg!t`O&VId_MJ-Dt`o z6A_42WPTUU+V)7yqbsW72aFP6*Hw(s*f0Fy z6f@@VTwE%VnDqRorVj-QeH1jA(F)f~XDQqf>j`LEM)Mil0@E8C*zPRX7%)XOat3}y z*u0hIMd{QJ=gb+)TL@R3lh%vXFKd*Y(VQPen}<0Y`BuTnnLw*0cSk{AipQ#SUYHL- z)y-io;*?p82$P&9p&bu^jPJ+l?v|N-<);}e%gWit{aLnol=*S*2G?yIA|jC@9Ws4Z zdr)(Wi)!a-Mf6Hp9`x`ZAIy)=HKK}C$|jo5;u8usuiB;6M}EAA>O0>;V=|BCt4HHSnNp1!OZPX}8g|=sG)CWMu)I@%R2Ax| z$Q=cM{BI?r;OyfAyx)geXRtF}90vgAquCNcUiw3R1`|T>p7;ijBh$WI?4n2aU>QbT0yae zbeSYt2BWCzvBv70U$WWT8xuZ*|8Ve@NoG=AWuqXDtXemjj>4)c9cAc5PlnM%#QagZ zsH6OU&U1_l^0<`iYtj))t_GVwsEnHRhdvBM3wIge8xe~BJ5W*3@Sf26wwh?c*g9yM zwn^K}*Tq;A$rb#K%_cZnOI8FX=Vi{o>p7Dd;Ksq;pF3In5C*=IrAVxzh8!y*aFKZilpCw zR5?^$_PeXolk>MkRU0PKCneFeb1p;3TX+gcns6nlK)K)L5Y~yJdlI4OLikHYE3`gt zf3+f|!~sizgF)K0CS8qMyp}nt0>5^)QEK(OoRC0{T98uZ4H%QYzk@~+ 
zOB3TOH)E%9jrC7RRQT*i%ks>qZRz2_kBbMuwZE~62-}id2YEdDqVIZ+qE$Ej8zB|1 z6UK%VUc#BP{MOqNbDnSBt~_@D5fPT--rmiY`i z!)w2$zG-7=S@FXZibz#;_x-a;IAT)ZeCg!El#;5ftu1p}Vn6|v0vG004?gRA5n~}m zUsS~jpJbXGZwJ)>P$3L>%_bR2c41eb#nE_sf@>-6w>2gW+S1s;CXes-AN!i!+4a9F z?Fb!ugLhnLi4?Ht%5p^L$TnVh$7+}%&%k}24axhik>*IJ{o8ML+;u>t1gurOO z9B-u`r!9)%@eEuJ+LDRkRB}TibQe*nv^v9@$V!&txDrd`3OR7WAvo`TI)wAs!jqe* zoPV3n8*so=(pU{YhcULS(`=*z_3^_WuI4o9)bgYx=-9lUw*tNPrwX(h4Dq_2`t)m0 zy3b+aT+Du@C<#?Z4khOMqJ_QW;>#EeIzu1~#E5NI{D)S?t-L0$mcZHqP+#T%pW`Y@ z^V7*OD4HVK+A5E05!}^8M&At;*_>+r8~q6O`a+ru*z-Pj{x)8{(q_oNLok)aEG-#a zW^rTV(T`g1x)-qzb=sQpmCJR5K$-OQ_20Bfui*PJb%$G{>LCljnouP3B*B-mD zg6s0l7E0p80&}m}U^USCa(9FQ<@0Xrw>N(qf5QU67047wQ+d#kYN(3(`ep-;ZgQuJ z|JxVtOmy}HDl!aSEUU?w-WQ1s+SVh}WIvgM2vH9}Hs1qHKuW{YQ}^`T!^=2DkNap> zF`;V2{$SnpFL-(}tIh}O7;iuo*IQKr&m})R*^*=Px>mClRrAXxay!55`4;@vs`4eJ zpFKdfcpdO={st#(A?P;U{=}9hS9G+$X@VWSPUf6BTcUXIhShxQ*Z>pzTxP=pv~bPz z(bLApcm=E764$%ewJGk?oXtMqJ5Y}y=1(^nOYS&%T$`ebkhku5!=wp;J4lsAuG0Rf zvtEi?Q=Cev8Sb)(sT02fHPkwlcGF6@nmv^F{U5|5ukDQQq&Czj_y2|e=YMXbMW+%i zENVoN$~8CtPGI@L<3jddrIwhvP; zpF?Rh;8`JP!FmozN$Y2Ntd)KW-x2JMf~!Z>2rweiCIrr~TwaDzb~`@GBufvn8DX5L zAf{Cf;PXRHPeJ!WV+rH_mj=n27l+=6W5NGJzY(&2usrvr*|GoJBs&&5H1$Lp9;qvM z7-D+PuVa4LA+!fhb!c)b6{)e&XrvPHXssq#_oMt)ic(3^cS)soKOSio%6W-)5W9Ue~TSArC6coZ2%>3d_e)RWb=UZa`#9eZNmPa zR5bTxg4c=8=JtaAGYRc@xXGASn-vUlfh1roo~TzE9A!j=Ha5Ug(Qe@UhXMn*2=OUj zJ?Oyu6$s=XQz&vKhmgHrOgp0Fd-H?^En-l6I?~&u_XK&b|8q74Ay!T`x z+D@NJ{`w^^Oi%GZN+wE*XF)QRjYJm0CLoLbxI!#!3?m~A#Y#>HFI$^RwIIzcqU*ye zl$>T@qd7~3=6S0T#Wt6ye zV)WZdT#?q5dkwP+Mj#YTtCe3?9Gv@0qcAbQu?(TD3MG*Ak`K_HCH&rQ*ag}{->oU? zwLo{EujSMDzwHEn0}nfZHh|~(8ztumLMVWHj)iGq2O9LFdm{AO zPZ^-OdP}|wHjh^*F}JV~k-?LEshHEFb-gT^HAk8)+)6GTHz@^5nEP47e$qkC0f0+G zuKGGj7AF46c%*mC_G!J$3}a*HUVYbeoPM%v8^)ZC{Z zhUIl@}j5euj_lCTG4-l^GHD2XsN`Y&ZMj7c&a(H2yz zGmzStEgym1rjN;q-DpNlYF@;Vy)l1yrLiNmIBe?JTeh(?A;=)qM2mxii!3hj(!Eu^ zxT=yX_p2KicZe7s>8i$S9*Qj6vI7``J`L=n~VyyH^{NhfMo*Pc1s5t4G5;C^p| zHEqmar^s|$LO=Xz4*_@o9k}$j@Wvt8Bdh88xqMi*>p!vTUuCE=p!f*W>3$Fia=69`A8;0bX1pn=@xLERd4L4Skn#d}VDdWa+ zS#X1H=$4}iE!sR~lRP;l0MaY5*gX1IbGmGbo2IMpL{qg^amlw{QB$N`N2KLISdu9Hx#Or|`dp|Bxrp&!`kuBb)YEva`{9f%^YiH&AlD5% z`-al9Eq=sVq~2QnXivcI(qTdG%WgPSH5Y0tI$ zby3RC!(LmvHH4wrrKRDsMcqD7sf9BZmO_#3nC%_?d6NG_Jp`Qa&pDNR+7M6L_vT+J zPdDgfWBs;4MIk+Gok`hLAuf@d{$&&iw9?>v`}$uI1k67YhWFkEcW`y;1#eDuGE2om zvc9NTf%o4@`JdEYJ%13*C}+mj^NX+CN~QGHe7tBR5;NclCof>%!NLj3=iEiwb5b0uOS(C=?!6=5|IxCG~6Uib&);emHvk08wJi_hp zAO=YmQdxbEFfdOtju``)`~SCip8T+f%5R`b4&HM>C-eMKt88dJ9V&h5$O+%Vf)vRO zG~l@J9hCr^UU6q-`~l>j`m+DpHTn6I+VL9yXZF7CSeOyW zcrEdP`J@4oUVun{$W;c9@}R9YHiXzd-CcYS~!x?>DvU~8GHG-(d^*Y?N$`!e1rn- zq`KNRSXx5pD+WYGdO}GRJO1$OE)Kir^7GOMKQu*x|L(!Sx8Hrhf9~!IT6(aT_E?fE z=0CR)a!7@^#A0MnmNBTu`^B&MtXi+xPwFkPh`9^!Ac2)lzWV#4)~5^K*Ax=K_}#)n zq?Cpp+!|}B{HXWg%SC8L6hIxZ0`>-2vb*eJY6Ox&|w&2=EA)31lpZ?bl1Ky z<$()idVzV;)Zc6@nXL0iC3=4@T>!`}bNMsJ;fP3l?U9x9v`dq@wpV-0S~9)4*)GMO4VRMb^ zMC{+xn@ylmZGN*j;ChNzM2K+KE6%5WE_!$2k=pr!Pi@;0^sI{zWsVk%wWq50pjxUM=XhmDh=jGIV_^%D$%i&23=_mh-q5sX<;$%Qb7imHvq)>!x9L= z-)OXNW9I;-KF6^$-;j00)_#yM750K=g901BmUKSm57xv{lk_N92p)LlY{@yf2v%yD zkge}5q~h z#AI9}wI2zFglV?H)MR1*Y`JgCugY4OQ$qFOmp*MIJj+;|#JUVH6sYUY~ z>a0?wzJ(>gDz#E+seVq5MI$EJnW3pYdTN)}GUH`V6Q)}*Q)G#ml~+=vP(zev2vX;1 zzMjw*3CMN0qoJwl4D^?Rng8O9@+d!BoVL~(2=C+mVY5qs?{fC&#A{PYXVza}ipEkKn&QmOTNBRvpw1kP$zF z9(NQ7EleAg)#95X%LCA)jxF-amD4jbea^<|vPEK)F&6@{M*5`xHLd>8{#8W^j}Kv9sTUq9SSkmHHZPS@ns85SLLY@}fEpfFoI zD_{n~7zZxKF$GfuNiI4c6i2Rn=daZvW!2u`tG1@W!z`$L0h{FUat2ou>5wbK!NMug zc|nBB_C037zLke^CD_f1E~e19KpNw(?)kyr>%0<@*NA&@Oqy$v_iucv&F?D|2-9EyW15Q0a!2A 
z7TLt#nlqcme$6}S?=xQBR#T#kdMeSrP^OfzwZs>-rVdA^nwxZVSxfn^{CA8*oaHS@ z!!W5|D;ORBf}(@2o}25$Dy6>8`f%C>29&A%L)UBQRoBkWw7H<@={g*EutA`RZZzFY z%+15Z@T)9kW5~Ag%k6=zMG@@I%|(~_rNQ<#F2kT3R;zZMt>Eu zriem;?^Qyq%JaZ4+CnvqXvlCXUlO>!^cYhhnY8U*Dhs1XpP*v=Ue`!GJX_CuaSSKB zudw40(PiV%40Qw95dZG;#M|N1m#{2cgE1bTlgZZF0}s#XKBlmQiur3*&cTH~5RQUw z+B_rojB?!bZU4eDb(g}HtfMDkSCw1DM3Z}XbCnY)F6Jp>0!Yq!RWe$bV(v48sdX>; zGLdp2#lI{Yvcag7nfXdnF4Kj1E__}Fm{E$Htls(<1ps1ykvPlrj>Fni#DrnhszmCS z83%@U+ME&YzCF<<>$rgOOeCnaLRDm8PHD7qXw*8In-N)EG_IkiqCBk;Cj;?>@)8Qe zGiegNQY9P=4H?AMz{^U7EI0gc_uYtLSEJq+ZeV!;4j0Z_!DVXRz+3L)c$zIU7omJz zVV6^|{0*%?53(wEu=OVY6-uDI+I+u5222@~glO{!F4vKlgaeRF_ea^PAh4J|+!@`H zv&2lp%R(J7g@bnw=8Z8<1ou0P0(UY#5m6>5Z`NKT8AA5&nej8s2hXJ>B^zx69$$Un zE!_yX9L6%Zn`n=n$`^*LMub(zuzet!xW7%dVh+TI2sSWIlGV&tkZd=^!#Sy_=x7t9 zdkpgNq=njHY@jl}h6O458!}_lnk~SWxQm$jZW*ZH#p{8KIO!6U9bsElOxH5kZWsl* zTs!khl!$=7af+*pOGlo1c?0zz(GbW%x;z*h;Z6WY)90L=qej>WAW!kXA!Up2dq{AJ z*{z1t@g3WX6Dg@u7lSX#j-BZ^KhJ1~9B8-3iB?D2JJO2zZME`H)Nzl(yVS0rBLyaC zK7aPLG-ic1q7NrRNRpqph!FQP^s`A9+Ifdeba4Qw|H` zCkLEVF2Cr9DO4-zX=rJIwhZhAR#i1!64?K01*b;6d}CeNhJ&t^IR;N)OQTK3#s(y;i`&S~Vr z1V;=(jnoLmAlHn;Yd~zQv~lxzW)*q5O2AoSX*72lx7mkR5%iVRNxUF6Doq_)mpK_N zatA!}zv{jrG?gC`lJfT^zV+*kXgG&BW)Iesf?Mwz=k^EGFQBmSm?nEE5;GiCDr}Xk zWHVqg+SLt}afpbU7!yWIi~k<$<7Az36^S4$!y%Q4R2YyU?Yjyh1!EK$9qh0DnZ5Ag|O#O}WPVsk}_fW4247;%|tHj6s z{@+X|Pf4vv?`S)>oYt^tDTm5l5{Sg#N$|>>Cq^7l1Qj^uWXo0>O~`5M^J zywahi&Dt$7xK7TQ=nWlsD64NR|Ho~?x?jFGLC;UfZ*Y0=ZvM;W0jh)A042ec4Z^x- z^~LosJ0>DstU4BDQID!(GQ=1fdnSv+!ALYko2(e5MmH(1l~@so!s#$TS&L|8f*2(s z0jJ@9SkGo>Og z9@;#dx5-b%(DShkK|5VG#>w0eR)&&IDnEYxF({i{s)UJ#emQ*wlw5GTWW};On%36` zz+?cec#g-1lvSs1*{LNZCClN=dm_6lLpnGx2Egv%J2gL$(M}#iWtonWVq1i9=+A!nusyX6_+1IP+;uR_sAFQn$3>r&B?{#pr8SY8kwlZ#FhW=vImnnU{cx&$rbr`>RDWqm%rr6erstm z!Cd(*`Ijm&dO*%dhgkJD#tkjk42t$7vcr3nx&;MLI27(YRdAYyJ5|KL;$=YRys#(rq zY8rQICaTpjpMDXnGehkVNTTdQYoLdTjr+)d(Qk|xMY8U6hWK*2^K>_Cm7;Di8GeAB z4vkSn90sdJQ)e5_u{icqMy>)jUcSh>3vH}ff7VIlJSi%#q@-G@UZ!3SE85-N{U1=PrVAGH2QKVv zs?`p5js&@Fp7GBel=Cjjn_XX8Qs$N#VWi%MlLVd?Cv2}+8s<0%P%0YU+5K)YL+9fg zTzBjc^zDfzfP?wZgZ<6RVX0M37C7P_0MavemE(@Uk-u+O)0oSubfLC@dXb}RS^3Z} zk)4SIm=KI~TLI>p-b<-RS>YM+~oHl&?IadY`$LQJHC;=0QDRcFj_ z&LAW$$rSY|>Ri7`RFF$Vvrt6#%uu`2E0X)Mp*)k;sQo}e{depRvo610a`^o8b;0Hv zL)??qH(iFLrNTFzE+)GqtIBYlU}m@d1@T~vrYWkl75~%n$+w>g7m5NAo{4>d+2lIx zDb#ka{76GYaPhgnoro%#!nge72P~8JF4x5g7h7HSsv@({ERw)X@O!VBJhgQNZ46@VP+@L0jxpkDu?5J0wCK=SDwKGnv>=BzAbmB9*f3PtQ=&A?LfKf1o==-9D6?YPr}&eE;9 zg!+$K6|y1W1cdhV|9$&D5rvlP4e{+fY84K*(Ne*x%6RA-YL2-JGceIGeXhw7t}fgY zAd>#oIN!j5BYGWPRz`oQiuq4Ya`*1*ZC?5BPKfs*#=S=MlIPapSI@2&KziZ(q`>{U zyYNnQBYit75%&2Tf%F7qhPJPmc-Wl2lfF1MgMjv4F=qdJ$ z@YjR#TYyyyI9yk2B{JcPNdwz{j+P4C?1wFG>mJ@={C4v=|HbDG=6VRO#-9AMZWtTE zj3pZouWeBik%j_(C%EiiHowC&v@E@p^bgincS1pYl3oYWyXVTYm2gcq3H_E>r*YoU z-N20fS%%f;vjzg4o?9D2bFK_EqEIU23sbo71!3{==rlCb_*llIw#*KI#IoM$ynwmH z)n{A0M9<~uKqEMspdT^&$RNMIu*JbD@U3%GUC(1K@`BbjUya!qpT_*#qa;*EK?rVi zE)Gj*p5DK1|ASiv*|0-5OnwV>SA%>8NhJ(;1t4RJ7{X(|i3BQZ3!U4?14v$3K+H2X zJDVh=a0CFp+6R#-QWRofMF*)zG18*Ph%cxmc0gM$|M=I9=8T)?Db$(uQDmxKMyiR8 zs{xaea`ylao1Q?-KrT?;vqR@`gb-135POELN>%&UMZ(WvTUaSP|dBSIn#=a(^ZHtbOUpVD9}&R%qDC)JYAN}3V96hGk@V(1paeq!ZPKDK$3 zCL#g!jt0GD&RhfHkdH72|E03tD+Zyar>X}sPZ*ZAMKbqDz{?B0YyL}f)PT3nQ%OwT{Gt7MV9NdLf`EQUZi#IdnI zQz`)$Y!M70nwv854961a>{HJ4xYF7tSWbNkO*IESD0swI0NIMjcSDD$@&Iw>hv#ST zV>~6(ShFIXwoH?VMITHe{GLb)p&jG}Oq)dqJr$&d;%oBpDnE`{n(PH5dX7pjFEqS= zXPVKj*nk{;5>`ivY!eNG=P}io_vWAKa;!u~m6|HD4rcg-Hu#fni;=6zwur}7kI&{nCSN^rcb~OqftA4$ zY)?dH+vEu>y*kVVpXIF~ioQxLmFkg>Eh}?b@>)#oYb3p8h+366)EDO#6J!VoG&JV{ zVUDh!f?r?NNUNuEg#<-ye#xT2VP)#lN@e%H(@Y%qvRsR?acsbC&15uozq5d&dpmrh 
[GIT binary patch data for the new docs/res/*.png logo images omitted; the base85-encoded image content is not human-readable.]
zNPG+6)c5T6zxpzGe*M(x+{AON=PiJ>H#?ob-|uwRK0yDg0B4PV0id6JRZw95ZvX%R zgh@m}R7ef&md#4TKoG}glWfu$2?(V&SMdcrdlP!`qA$=V@F9Ex-$4)`z(b)Zp1jwy z;Jt|-h(bIhA={ZIyM?$xw-pc4g|N)-%>TDL*`0*JFSp5aCI(lpz&&fO91ph{TZM5q z&BXC&A*jG=Hz*1etUbut6d<(~qoun{+ z_ypItK$J$A(K;)LoZSta#g(lGpR!W)h5(`{f#HQS1%DBgu@%{)u(I18SGttGg we&cn{%`x4M34C<>4)IG4oG-opChBMQ3|#P9?DlWydH?_b07*qoM6N<$f>2P`uK)l5 literal 0 HcmV?d00001 diff --git a/docs/res/win_sm.png b/docs/res/win_sm.png new file mode 100644 index 0000000000000000000000000000000000000000..73d134a4f5173b5578043fa1b6b10804049116ad GIT binary patch literal 1348 zcmV-K1-tr*P)4Tx062|}Rb6NtRTMtEb7vzY&QokOg>Hg1+lHrgWS zWcKdPn90sKGrRqvPeo9CG3uKX#J{(IASm?@+di}}l?o-=)F3E6wD^Ni=!>T7nL9I? zX}YoAW$t|Qo$sD|?zw001?ah|SeB6#0T!CBEf+H4bBB+JJu8rehoBb*p;u8ID_yBf z0ya+zcePvJL&AGs+11_tpRKn>9TgyPA7ZoSs0)aX0r00)%XR^J`jH<$>RKN5V(7Oq zK*TS4xZz{h!*f1C3ECFkK$#7nA@pGN!$;%jYvwjAKwmYb0gKL(K8 z-kPtb5${A?tlI~wzMrJ6wTdBr=Y%%%EaEMQ&o}4FQ^DA)s*}Z>!FI&AHCpoWI|RUq zx?7s@$8!5^Q=anY%X@i5{QA6kNcMelpE>R6eCYFpmMsVTrI(b06~u#xf1yS} z_UGdMvD``!0~u->P=lA4?YN`hilQ|3tHka)7T{2CGqw zjZfMwx$5irQN_*|e4l)UHmiYuz74Yp1t^#>hrJ3-SOXDcC_o0^7T9R1gAN8V6s;5) zieI5-7aQlmJn}lUna#nz!j%5V$X|o`xX!dHWQRV27P1=rj;t2bW$~+pTw@bIek?Zv zKPDL<64`^#UNTAck#RBsB6*5DP4<%UA_FqU$I>2EH_cM;u)Q~SI+rg`Rn{L_AC5qq~L$#SMj%U z$6Cz0vP{G5Y*=%5RT^yu;}-DInZ=349rJPVM6C3K^oO)8y(fJr{l>k`ead~!ea?NsT>_Ci%bnxC;Vy6=b6>{xYV#Ue-+LB$ z7`JEXmTRm^AtP)R9u{)KHsMiWGV&)32xCG~*nyU<>-!d;FP=Re4r3qYr~6#KE>;1F z`>_J_P5xC?ROxV(DIHdCO*p$HRQI@7^PwV@Pvuf+5K}u-6REM(K@W$s zrgorh0{i?O)v0c>QtHxU-hBdD(>iYJ4b2sIOVX2K8m~4gmYVA5h^QEb$V`rCQ-|7Z zS{nuL-t>?3n=-o(6I(7vocj#GzCZEo`!3>+v;dYIfPu#&ZWzzX2i^rZ^Mu;6+rb@? zNPG+6)c5T6zxpzGe*M(x+{AON=PiJ>H#?ob-|uwRK0yDg0B4PV0id6JRZw95ZvX%Q z@<~KNR5%f1WFP??e*Wu!z}B}6tn5q}x)?D;&;%rxzWo3DHv}p0N_bG8PGjKCAG6=9i;)5NiQjndQ z0mNei#vmsL69Xqu9t8M+Y`8(LlB{?Yif}VCsBkkeFf%bS@Bnr3u|jO+V_{_AU}9v1 zN5914m;b@WqY)%WKbn4$xk#ZxvR)JogU=v%V1wLw=j)&U_2<4a?6~<2p2LX^Y!rh~ zJdK4TzUuY=KcD`neK0000 Note 1: Red Hat Enterprise Linux 6 and CentOS 6 require installing the specialized "rhel.6-x64" agent package +> Note 2: ARM instruction set [ARMv7](https://en.wikipedia.org/wiki/List_of_ARM_microarchitectures) or above is required, you can get your device's information by executing `uname -a` + +## Install .Net Core 2.x Linux Dependencies + +The `./config.sh` will check .Net Core 2.x dependencies during agent configuration. +You might see something like this which indicate a dependency's missing. +```bash +./config.sh + libunwind.so.8 => not found + libunwind-x86_64.so.8 => not found +Dependencies is missing for Dotnet Core 2.1 +Execute ./bin/installdependencies.sh to install any missing Dotnet Core 2.1 dependencies. +``` +You can easily correct the problem by executing `./bin/installdependencies.sh`. +The `installdependencies.sh` script should install all required dependencies on all supported Linux versions +> Note: The `installdependencies.sh` script will try to use the default package management mechanism on your Linux flavor (ex. `yum`/`apt-get`/`apt`). 
You might need to deal with error coming from the package management mechanism related to your setup, like [#1353](https://github.com/Microsoft/vsts-agent/issues/1353) + +## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/linux-prerequisites?tabs=netcore2x) diff --git a/docs/start/envosx.md b/docs/start/envosx.md new file mode 100644 index 00000000000..13969600a25 --- /dev/null +++ b/docs/start/envosx.md @@ -0,0 +1,10 @@ + + +# ![osx](../res/apple_med.png) macOS/OS X System Prerequisites + +## Supported Versions + + - macOS Sierra (10.12) and later versions + + +## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/macos-prerequisites?tabs=netcore2x) diff --git a/docs/start/envwin.md b/docs/start/envwin.md new file mode 100644 index 00000000000..4dd316990ef --- /dev/null +++ b/docs/start/envwin.md @@ -0,0 +1,12 @@ +# ![win](../res/win_med.png) Windows System Prerequisites + +## Supported Versions + + - Windows 7 64-bit + - Windows 8.1 64-bit + - Windows 10 64-bit + - Windows Server 2008 R2 SP1 64-bit + - Windows Server 2012 R2 64-bit + - Windows Server 2016 64-bit + +## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore2x) diff --git a/images/arm/Dockerfile b/images/arm/Dockerfile new file mode 100644 index 00000000000..233468fd045 --- /dev/null +++ b/images/arm/Dockerfile @@ -0,0 +1,7 @@ +FROM mcr.microsoft.com/dotnet/core/runtime-deps:2.1 + +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + curl \ + git \ + && rm -rf /var/lib/apt/lists/* diff --git a/images/centos6/Dockerfile b/images/centos6/Dockerfile new file mode 100644 index 00000000000..8270e68b987 --- /dev/null +++ b/images/centos6/Dockerfile @@ -0,0 +1,150 @@ +FROM centos:6 + +# Install dependencies + +RUN yum install -y \ + centos-release-SCL \ + epel-release \ + wget \ + unzip \ + && \ + rpm --import http://linuxsoft.cern.ch/cern/slc6X/x86_64/RPM-GPG-KEY-cern && \ + wget -O /etc/yum.repos.d/slc6-devtoolset.repo http://linuxsoft.cern.ch/cern/devtoolset/slc6-devtoolset.repo && \ + yum install -y \ + "perl(Time::HiRes)" \ + autoconf \ + cmake \ + cmake3 \ + devtoolset-2-toolchain \ + doxygen \ + expat-devel \ + gcc \ + gcc-c++ \ + gdb \ + gettext-devel \ + krb5-devel \ + libedit-devel \ + libidn-devel \ + libmetalink-devel \ + libnghttp2-devel \ + libssh2-devel \ + libunwind-devel \ + libuuid-devel \ + lttng-ust-devel \ + lzma \ + ncurses-devel \ + openssl-devel \ + perl-devel \ + python-argparse \ + python27 \ + readline-devel \ + swig \ + xz \ + zlib-devel \ + && \ + yum clean all + +# Build and install clang and lldb 3.9.1 + +RUN wget ftp://sourceware.org/pub/binutils/snapshots/binutils-2.29.1.tar.xz && \ + wget http://releases.llvm.org/3.9.1/cfe-3.9.1.src.tar.xz && \ + wget http://releases.llvm.org/3.9.1/llvm-3.9.1.src.tar.xz && \ + wget http://releases.llvm.org/3.9.1/lldb-3.9.1.src.tar.xz && \ + wget http://releases.llvm.org/3.9.1/compiler-rt-3.9.1.src.tar.xz && \ + \ + tar -xf binutils-2.29.1.tar.xz && \ + tar -xf llvm-3.9.1.src.tar.xz && \ + mkdir llvm-3.9.1.src/tools/clang && \ + mkdir llvm-3.9.1.src/tools/lldb && \ + mkdir llvm-3.9.1.src/projects/compiler-rt && \ + tar -xf cfe-3.9.1.src.tar.xz --strip 1 -C llvm-3.9.1.src/tools/clang && \ + tar -xf lldb-3.9.1.src.tar.xz --strip 1 -C llvm-3.9.1.src/tools/lldb && \ + tar -xf compiler-rt-3.9.1.src.tar.xz --strip 1 -C llvm-3.9.1.src/projects/compiler-rt && \ + rm binutils-2.29.1.tar.xz && \ + rm 
cfe-3.9.1.src.tar.xz && \ + rm lldb-3.9.1.src.tar.xz && \ + rm llvm-3.9.1.src.tar.xz && \ + rm compiler-rt-3.9.1.src.tar.xz && \ + \ + mkdir llvmbuild && \ + cd llvmbuild && \ + scl enable python27 devtoolset-2 \ + ' \ + cmake3 \ + -DCMAKE_CXX_COMPILER=/opt/rh/devtoolset-2/root/usr/bin/g++ \ + -DCMAKE_C_COMPILER=/opt/rh/devtoolset-2/root/usr/bin/gcc \ + -DCMAKE_LINKER=/opt/rh/devtoolset-2/root/usr/bin/ld \ + -DCMAKE_BUILD_TYPE=Release \ + -DLLVM_LIBDIR_SUFFIX=64 \ + -DLLVM_ENABLE_EH=1 \ + -DLLVM_ENABLE_RTTI=1 \ + -DLLVM_BINUTILS_INCDIR=../binutils-2.29.1/include \ + ../llvm-3.9.1.src \ + && \ + make -j $(($(getconf _NPROCESSORS_ONLN)+1)) && \ + make install \ + ' && \ + cd .. && \ + rm -r llvmbuild && \ + rm -r llvm-3.9.1.src && \ + rm -r binutils-2.29.1 + +# Build and install curl 7.45.0 + +RUN wget https://curl.haxx.se/download/curl-7.45.0.tar.lzma && \ + tar -xf curl-7.45.0.tar.lzma && \ + rm curl-7.45.0.tar.lzma && \ + cd curl-7.45.0 && \ + scl enable python27 devtoolset-2 \ + ' \ + ./configure \ + --disable-dict \ + --disable-ftp \ + --disable-gopher \ + --disable-imap \ + --disable-ldap \ + --disable-ldaps \ + --disable-libcurl-option \ + --disable-manual \ + --disable-pop3 \ + --disable-rtsp \ + --disable-smb \ + --disable-smtp \ + --disable-telnet \ + --disable-tftp \ + --enable-ipv6 \ + --enable-optimize \ + --enable-symbol-hiding \ + --with-ca-bundle=/etc/pki/tls/certs/ca-bundle.crt \ + --with-nghttp2 \ + --with-gssapi \ + --with-ssl \ + --without-librtmp \ + && \ + make install \ + ' && \ + cd .. && \ + rm -r curl-7.45.0 + +# Install ICU 57.1 + +RUN wget http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-RHEL6-x64.tgz && \ + tar -xf icu4c-57_1-RHEL6-x64.tgz -C / && \ + rm icu4c-57_1-RHEL6-x64.tgz + +# Compile and install a version of the git that supports the features that cli repo build needs +# NOTE: The git needs to be built after the curl so that it can use the libcurl to add https +# protocol support. +RUN \ + wget https://www.kernel.org/pub/software/scm/git/git-2.9.5.tar.gz && \ + tar -xf git-2.9.5.tar.gz && \ + rm git-2.9.5.tar.gz && \ + cd git-2.9.5 && \ + make configure && \ + ./configure --prefix=/usr/local --without-tcltk && \ + make -j $(nproc --all) all && \ + make install && \ + cd .. && \ + rm -r git-2.9.5 + +ENV LD_LIBRARY_PATH=/usr/local/lib diff --git a/nonwindows.template.yml b/nonwindows.template.yml new file mode 100644 index 00000000000..833bd0500ba --- /dev/null +++ b/nonwindows.template.yml @@ -0,0 +1,29 @@ +steps: + +# Build agent layout +- script: ./dev.sh layout Release + workingDirectory: src + displayName: Build & Layout Release + +# Run test +- script: ./dev.sh test + workingDirectory: src + displayName: Test + +# # Publish test results +# - task: PublishTestResults@2 +# displayName: Publish Test Results **/*.trx +# condition: always() +# inputs: +# testRunner: VSTest +# testResultsFiles: '**/*.trx' +# testRunTitle: 'Agent Tests' + +# # Upload test log +# - task: PublishBuildArtifacts@1 +# displayName: Publish Test logs +# condition: always() +# inputs: +# pathToPublish: src/Test/TestLogs +# artifactName: $(System.JobId) +# artifactType: container diff --git a/releaseNote.md b/releaseNote.md new file mode 100644 index 00000000000..e6e2e888239 --- /dev/null +++ b/releaseNote.md @@ -0,0 +1,44 @@ +## Features + - Runner config auth via GitHub.com. (#107) (#117) + - Adding wrapper action to support post job cleanup, adding checkout v1.1 (#91) + - Improving terminal experience (#110) + - Add runner support for cache action. 
(#120) + +## Bugs + - Set GITHUB_ACTIONS in containers. (#119) + - Fix issue data column/col mismatch. (#122) + +## Misc + - Use GitHub actions for CI/PR (#112) + - Code Cleanup (#123) (#124) (#125) + +## Agent Downloads + +| | Package | +| ------- | ----------------------------------------------------------------------------------------------------------- | +| Windows x64 | [actions-runner-win-x64-.zip](https://githubassets.azureedge.net/runners//actions-runner-win-x64-.zip) | +| macOS | [actions-runner-osx-x64-.tar.gz](https://githubassets.azureedge.net/runners//actions-runner-osx-x64-.tar.gz) | +| Linux x64 | [actions-runner-linux-x64-.tar.gz](https://githubassets.azureedge.net/runners//actions-runner-linux-x64-.tar.gz) | + +After Download: + +## Windows x64 + +``` bash +C:\> mkdir myagent && cd myagent +C:\myagent> Add-Type -AssemblyName System.IO.Compression.FileSystem ; [System.IO.Compression.ZipFile]::ExtractToDirectory("$HOME\Downloads\actions-runner-win-x64-.zip", "$PWD") +``` + +## OSX + +``` bash +~/$ mkdir myagent && cd myagent +~/myagent$ tar xzf ~/Downloads/actions-runner-osx-x64-.tar.gz +``` + +## Linux x64 + +``` bash +~/$ mkdir myagent && cd myagent +~/myagent$ tar xzf ~/Downloads/actions-runner-linux-x64-.tar.gz +``` diff --git a/src/Misc/dotnet-install.ps1 b/src/Misc/dotnet-install.ps1 new file mode 100644 index 00000000000..2ff62da5aaf --- /dev/null +++ b/src/Misc/dotnet-install.ps1 @@ -0,0 +1,650 @@ +# +# Copyright (c) .NET Foundation and contributors. All rights reserved. +# Licensed under the MIT license. See LICENSE file in the project root for full license information. +# + +<# +.SYNOPSIS + Installs dotnet cli +.DESCRIPTION + Installs dotnet cli. If dotnet installation already exists in the given directory + it will update it only if the requested version differs from the one already installed. +.PARAMETER Channel + Default: LTS + Download from the Channel specified. Possible values: + - Current - most current release + - LTS - most current supported release + - 2-part version in a format A.B - represents a specific release + examples: 2.0, 1.0 + - Branch name + examples: release/2.0.0, Master + Note: The version parameter overrides the channel parameter. +.PARAMETER Version + Default: latest + Represents a build version on specific channel. Possible values: + - latest - most latest build on specific channel + - coherent - most latest coherent build on specific channel + coherent applies only to SDK downloads + - 3-part version in a format A.B.C - represents specific version of build + examples: 2.0.0-preview2-006120, 1.1.0 +.PARAMETER InstallDir + Default: %LocalAppData%\Microsoft\dotnet + Path to where to install dotnet. Note that binaries will be placed directly in a given directory. +.PARAMETER Architecture + Default: - this value represents currently running OS architecture + Architecture of dotnet binaries to be installed. + Possible values are: , amd64, x64, x86, arm64, arm +.PARAMETER SharedRuntime + This parameter is obsolete and may be removed in a future version of this script. + The recommended alternative is '-Runtime dotnet'. + + Default: false + Installs just the shared runtime bits, not the entire SDK. + This is equivalent to specifying `-Runtime dotnet`. +.PARAMETER Runtime + Installs just a shared runtime, not the entire SDK. 
+ Possible values: + - dotnet - the Microsoft.NETCore.App shared runtime + - aspnetcore - the Microsoft.AspNetCore.App shared runtime + - windowsdesktop - the Microsoft.WindowsDesktop.App shared runtime +.PARAMETER DryRun + If set it will not perform installation but instead display what command line to use to consistently install + currently requested version of dotnet cli. In example if you specify version 'latest' it will display a link + with specific version so that this command can be used deterministicly in a build script. + It also displays binaries location if you prefer to install or download it yourself. +.PARAMETER NoPath + By default this script will set environment variable PATH for the current process to the binaries folder inside installation folder. + If set it will display binaries location but not set any environment variable. +.PARAMETER Verbose + Displays diagnostics information. +.PARAMETER AzureFeed + Default: https://dotnetcli.azureedge.net/dotnet + This parameter typically is not changed by the user. + It allows changing the URL for the Azure feed used by this installer. +.PARAMETER UncachedFeed + This parameter typically is not changed by the user. + It allows changing the URL for the Uncached feed used by this installer. +.PARAMETER FeedCredential + Used as a query string to append to the Azure feed. + It allows changing the URL to use non-public blob storage accounts. +.PARAMETER ProxyAddress + If set, the installer will use the proxy when making web requests +.PARAMETER ProxyUseDefaultCredentials + Default: false + Use default credentials, when using proxy address. +.PARAMETER SkipNonVersionedFiles + Default: false + Skips installing non-versioned files if they already exist, such as dotnet.exe. +.PARAMETER NoCdn + Disable downloading from the Azure CDN, and use the uncached feed directly. +#> +[cmdletbinding()] +param( + [string]$Channel="LTS", + [string]$Version="Latest", + [string]$InstallDir="", + [string]$Architecture="", + [ValidateSet("dotnet", "aspnetcore", "windowsdesktop", IgnoreCase = $false)] + [string]$Runtime, + [Obsolete("This parameter may be removed in a future version of this script. 
The recommended alternative is '-Runtime dotnet'.")] + [switch]$SharedRuntime, + [switch]$DryRun, + [switch]$NoPath, + [string]$AzureFeed="https://dotnetcli.azureedge.net/dotnet", + [string]$UncachedFeed="https://dotnetcli.blob.core.windows.net/dotnet", + [string]$FeedCredential, + [string]$ProxyAddress, + [switch]$ProxyUseDefaultCredentials, + [switch]$SkipNonVersionedFiles, + [switch]$NoCdn +) + +Set-StrictMode -Version Latest +$ErrorActionPreference="Stop" +$ProgressPreference="SilentlyContinue" + +if ($NoCdn) { + $AzureFeed = $UncachedFeed +} + +$BinFolderRelativePath="" + +if ($SharedRuntime -and (-not $Runtime)) { + $Runtime = "dotnet" +} + +# example path with regex: shared/1.0.0-beta-12345/somepath +$VersionRegEx="/\d+\.\d+[^/]+/" +$OverrideNonVersionedFiles = !$SkipNonVersionedFiles + +function Say($str) { + Write-Host "dotnet-install: $str" +} + +function Say-Verbose($str) { + Write-Verbose "dotnet-install: $str" +} + +function Say-Invocation($Invocation) { + $command = $Invocation.MyCommand; + $args = (($Invocation.BoundParameters.Keys | foreach { "-$_ `"$($Invocation.BoundParameters[$_])`"" }) -join " ") + Say-Verbose "$command $args" +} + +function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [int]$SecondsBetweenAttempts = 1) { + $Attempts = 0 + + while ($true) { + try { + return $ScriptBlock.Invoke() + } + catch { + $Attempts++ + if ($Attempts -lt $MaxAttempts) { + Start-Sleep $SecondsBetweenAttempts + } + else { + throw + } + } + } +} + +function Get-Machine-Architecture() { + Say-Invocation $MyInvocation + + # possible values: amd64, x64, x86, arm64, arm + return $ENV:PROCESSOR_ARCHITECTURE +} + +function Get-CLIArchitecture-From-Architecture([string]$Architecture) { + Say-Invocation $MyInvocation + + switch ($Architecture.ToLower()) { + { $_ -eq "" } { return Get-CLIArchitecture-From-Architecture $(Get-Machine-Architecture) } + { ($_ -eq "amd64") -or ($_ -eq "x64") } { return "x64" } + { $_ -eq "x86" } { return "x86" } + { $_ -eq "arm" } { return "arm" } + { $_ -eq "arm64" } { return "arm64" } + default { throw "Architecture not supported. If you think this is a bug, report it at https://github.com/dotnet/cli/issues" } + } +} + +# The version text returned from the feeds is a 1-line or 2-line string: +# For the SDK and the dotnet runtime (2 lines): +# Line 1: # commit_hash +# Line 2: # 4-part version +# For the aspnetcore runtime (1 line): +# Line 1: # 4-part version +function Get-Version-Info-From-Version-Text([string]$VersionText) { + Say-Invocation $MyInvocation + + $Data = -split $VersionText + + $VersionInfo = @{ + CommitHash = $(if ($Data.Count -gt 1) { $Data[0] }) + Version = $Data[-1] # last line is always the version number. + } + return $VersionInfo +} + +function Load-Assembly([string] $Assembly) { + try { + Add-Type -Assembly $Assembly | Out-Null + } + catch { + # On Nano Server, Powershell Core Edition is used. Add-Type is unable to resolve base class assemblies because they are not GAC'd. + # Loading the base class assemblies is not unnecessary as the types will automatically get resolved. + } +} + +function GetHTTPResponse([Uri] $Uri) +{ + Invoke-With-Retry( + { + + $HttpClient = $null + + try { + # HttpClient is used vs Invoke-WebRequest in order to support Nano Server which doesn't support the Invoke-WebRequest cmdlet. 
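+                # Load-Assembly deliberately swallows Add-Type failures, so this also works on PowerShell Core, where the System.Net.Http types are already resolvable without loading the assembly.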
+ Load-Assembly -Assembly System.Net.Http + + if(-not $ProxyAddress) { + try { + # Despite no proxy being explicitly specified, we may still be behind a default proxy + $DefaultProxy = [System.Net.WebRequest]::DefaultWebProxy; + if($DefaultProxy -and (-not $DefaultProxy.IsBypassed($Uri))) { + $ProxyAddress = $DefaultProxy.GetProxy($Uri).OriginalString + $ProxyUseDefaultCredentials = $true + } + } catch { + # Eat the exception and move forward as the above code is an attempt + # at resolving the DefaultProxy that may not have been a problem. + $ProxyAddress = $null + Say-Verbose("Exception ignored: $_.Exception.Message - moving forward...") + } + } + + if($ProxyAddress) { + $HttpClientHandler = New-Object System.Net.Http.HttpClientHandler + $HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{Address=$ProxyAddress;UseDefaultCredentials=$ProxyUseDefaultCredentials} + $HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler + } + else { + + $HttpClient = New-Object System.Net.Http.HttpClient + } + # Default timeout for HttpClient is 100s. For a 50 MB download this assumes 500 KB/s average, any less will time out + # 20 minutes allows it to work over much slower connections. + $HttpClient.Timeout = New-TimeSpan -Minutes 20 + $Response = $HttpClient.GetAsync("${Uri}${FeedCredential}").Result + if (($Response -eq $null) -or (-not ($Response.IsSuccessStatusCode))) { + # The feed credential is potentially sensitive info. Do not log FeedCredential to console output. + $ErrorMsg = "Failed to download $Uri." + if ($Response -ne $null) { + $ErrorMsg += " $Response" + } + + throw $ErrorMsg + } + + return $Response + } + finally { + if ($HttpClient -ne $null) { + $HttpClient.Dispose() + } + } + }) +} + + +function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Coherent) { + Say-Invocation $MyInvocation + + $VersionFileUrl = $null + if ($Runtime -eq "dotnet") { + $VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version" + } + elseif ($Runtime -eq "aspnetcore") { + $VersionFileUrl = "$UncachedFeed/aspnetcore/Runtime/$Channel/latest.version" + } + # Currently, the WindowsDesktop runtime is manufactured with the .Net core runtime + elseif ($Runtime -eq "windowsdesktop") { + $VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version" + } + elseif (-not $Runtime) { + if ($Coherent) { + $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.coherent.version" + } + else { + $VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version" + } + } + else { + throw "Invalid value for `$Runtime" + } + try { + $Response = GetHTTPResponse -Uri $VersionFileUrl + } + catch { + throw "Could not resolve version information." + } + $StringContent = $Response.Content.ReadAsStringAsync().Result + + switch ($Response.Content.Headers.ContentType) { + { ($_ -eq "application/octet-stream") } { $VersionText = $StringContent } + { ($_ -eq "text/plain") } { $VersionText = $StringContent } + { ($_ -eq "text/plain; charset=UTF-8") } { $VersionText = $StringContent } + default { throw "``$Response.Content.Headers.ContentType`` is an unknown .version file content type." 
} + } + + $VersionInfo = Get-Version-Info-From-Version-Text $VersionText + + return $VersionInfo +} + + +function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel, [string]$Version) { + Say-Invocation $MyInvocation + + switch ($Version.ToLower()) { + { $_ -eq "latest" } { + $LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $False + return $LatestVersionInfo.Version + } + { $_ -eq "coherent" } { + $LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $True + return $LatestVersionInfo.Version + } + default { return $Version } + } +} + +function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) { + Say-Invocation $MyInvocation + + if ($Runtime -eq "dotnet") { + $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-runtime-$SpecificVersion-win-$CLIArchitecture.zip" + } + elseif ($Runtime -eq "aspnetcore") { + $PayloadURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/aspnetcore-runtime-$SpecificVersion-win-$CLIArchitecture.zip" + } + elseif ($Runtime -eq "windowsdesktop") { + $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificVersion-win-$CLIArchitecture.zip" + } + elseif (-not $Runtime) { + $PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-sdk-$SpecificVersion-win-$CLIArchitecture.zip" + } + else { + throw "Invalid value for `$Runtime" + } + + Say-Verbose "Constructed primary named payload URL: $PayloadURL" + + return $PayloadURL +} + +function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) { + Say-Invocation $MyInvocation + + if (-not $Runtime) { + $PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-dev-win-$CLIArchitecture.$SpecificVersion.zip" + } + elseif ($Runtime -eq "dotnet") { + $PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-win-$CLIArchitecture.$SpecificVersion.zip" + } + else { + return $null + } + + Say-Verbose "Constructed legacy named payload URL: $PayloadURL" + + return $PayloadURL +} + +function Get-User-Share-Path() { + Say-Invocation $MyInvocation + + $InstallRoot = $env:DOTNET_INSTALL_DIR + if (!$InstallRoot) { + $InstallRoot = "$env:LocalAppData\Microsoft\dotnet" + } + return $InstallRoot +} + +function Resolve-Installation-Path([string]$InstallDir) { + Say-Invocation $MyInvocation + + if ($InstallDir -eq "") { + return Get-User-Share-Path + } + return $InstallDir +} + +function Get-Version-Info-From-Version-File([string]$InstallRoot, [string]$RelativePathToVersionFile) { + Say-Invocation $MyInvocation + + $VersionFile = Join-Path -Path $InstallRoot -ChildPath $RelativePathToVersionFile + Say-Verbose "Local version file: $VersionFile" + + if (Test-Path $VersionFile) { + $VersionText = cat $VersionFile + Say-Verbose "Local version file text: $VersionText" + return Get-Version-Info-From-Version-Text $VersionText + } + + Say-Verbose "Local version file not found." 
+ + return $null +} + +function Is-Dotnet-Package-Installed([string]$InstallRoot, [string]$RelativePathToPackage, [string]$SpecificVersion) { + Say-Invocation $MyInvocation + + $DotnetPackagePath = Join-Path -Path $InstallRoot -ChildPath $RelativePathToPackage | Join-Path -ChildPath $SpecificVersion + Say-Verbose "Is-Dotnet-Package-Installed: Path to a package: $DotnetPackagePath" + return Test-Path $DotnetPackagePath -PathType Container +} + +function Get-Absolute-Path([string]$RelativeOrAbsolutePath) { + # Too much spam + # Say-Invocation $MyInvocation + + return $ExecutionContext.SessionState.Path.GetUnresolvedProviderPathFromPSPath($RelativeOrAbsolutePath) +} + +function Get-Path-Prefix-With-Version($path) { + $match = [regex]::match($path, $VersionRegEx) + if ($match.Success) { + return $entry.FullName.Substring(0, $match.Index + $match.Length) + } + + return $null +} + +function Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package([System.IO.Compression.ZipArchive]$Zip, [string]$OutPath) { + Say-Invocation $MyInvocation + + $ret = @() + foreach ($entry in $Zip.Entries) { + $dir = Get-Path-Prefix-With-Version $entry.FullName + if ($dir -ne $null) { + $path = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $dir) + if (-Not (Test-Path $path -PathType Container)) { + $ret += $dir + } + } + } + + $ret = $ret | Sort-Object | Get-Unique + + $values = ($ret | foreach { "$_" }) -join ";" + Say-Verbose "Directories to unpack: $values" + + return $ret +} + +# Example zip content and extraction algorithm: +# Rule: files if extracted are always being extracted to the same relative path locally +# .\ +# a.exe # file does not exist locally, extract +# b.dll # file exists locally, override only if $OverrideFiles set +# aaa\ # same rules as for files +# ... +# abc\1.0.0\ # directory contains version and exists locally +# ... # do not extract content under versioned part +# abc\asd\ # same rules as for files +# ... +# def\ghi\1.0.1\ # directory contains version and does not exist locally +# ... 
# extract content +function Extract-Dotnet-Package([string]$ZipPath, [string]$OutPath) { + Say-Invocation $MyInvocation + + Load-Assembly -Assembly System.IO.Compression.FileSystem + Set-Variable -Name Zip + try { + $Zip = [System.IO.Compression.ZipFile]::OpenRead($ZipPath) + + $DirectoriesToUnpack = Get-List-Of-Directories-And-Versions-To-Unpack-From-Dotnet-Package -Zip $Zip -OutPath $OutPath + + foreach ($entry in $Zip.Entries) { + $PathWithVersion = Get-Path-Prefix-With-Version $entry.FullName + if (($PathWithVersion -eq $null) -Or ($DirectoriesToUnpack -contains $PathWithVersion)) { + $DestinationPath = Get-Absolute-Path $(Join-Path -Path $OutPath -ChildPath $entry.FullName) + $DestinationDir = Split-Path -Parent $DestinationPath + $OverrideFiles=$OverrideNonVersionedFiles -Or (-Not (Test-Path $DestinationPath)) + if ((-Not $DestinationPath.EndsWith("\")) -And $OverrideFiles) { + New-Item -ItemType Directory -Force -Path $DestinationDir | Out-Null + [System.IO.Compression.ZipFileExtensions]::ExtractToFile($entry, $DestinationPath, $OverrideNonVersionedFiles) + } + } + } + } + finally { + if ($Zip -ne $null) { + $Zip.Dispose() + } + } +} + +function DownloadFile($Source, [string]$OutPath) { + if ($Source -notlike "http*") { + # Using System.IO.Path.GetFullPath to get the current directory + # does not work in this context - $pwd gives the current directory + if (![System.IO.Path]::IsPathRooted($Source)) { + $Source = $(Join-Path -Path $pwd -ChildPath $Source) + } + $Source = Get-Absolute-Path $Source + Say "Copying file from $Source to $OutPath" + Copy-Item $Source $OutPath + return + } + + $Stream = $null + + try { + $Response = GetHTTPResponse -Uri $Source + $Stream = $Response.Content.ReadAsStreamAsync().Result + $File = [System.IO.File]::Create($OutPath) + $Stream.CopyTo($File) + $File.Close() + } + finally { + if ($Stream -ne $null) { + $Stream.Dispose() + } + } +} + +function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolderRelativePath) { + $BinPath = Get-Absolute-Path $(Join-Path -Path $InstallRoot -ChildPath $BinFolderRelativePath) + if (-Not $NoPath) { + $SuffixedBinPath = "$BinPath;" + if (-Not $env:path.Contains($SuffixedBinPath)) { + Say "Adding to current process PATH: `"$BinPath`". Note: This change will not be visible if PowerShell was run as a child process." 
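+            # Prepend rather than append so this install's binaries are found ahead of any dotnet already on PATH.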
+ $env:path = $SuffixedBinPath + $env:path + } else { + Say-Verbose "Current process PATH already contains `"$BinPath`"" + } + } + else { + Say "Binaries of dotnet can be found in $BinPath" + } +} + +$CLIArchitecture = Get-CLIArchitecture-From-Architecture $Architecture +$SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $AzureFeed -Channel $Channel -Version $Version +$DownloadLink = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture +$LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture + +$InstallRoot = Resolve-Installation-Path $InstallDir +Say-Verbose "InstallRoot: $InstallRoot" +$ScriptName = $MyInvocation.MyCommand.Name + +if ($DryRun) { + Say "Payload URLs:" + Say "Primary named payload URL: $DownloadLink" + if ($LegacyDownloadLink) { + Say "Legacy named payload URL: $LegacyDownloadLink" + } + $RepeatableCommand = ".\$ScriptName -Version `"$SpecificVersion`" -InstallDir `"$InstallRoot`" -Architecture `"$CLIArchitecture`"" + if ($Runtime -eq "dotnet") { + $RepeatableCommand+=" -Runtime `"dotnet`"" + } + elseif ($Runtime -eq "aspnetcore") { + $RepeatableCommand+=" -Runtime `"aspnetcore`"" + } + foreach ($key in $MyInvocation.BoundParameters.Keys) { + if (-not (@("Architecture","Channel","DryRun","InstallDir","Runtime","SharedRuntime","Version") -contains $key)) { + $RepeatableCommand+=" -$key `"$($MyInvocation.BoundParameters[$key])`"" + } + } + Say "Repeatable invocation: $RepeatableCommand" + exit 0 +} + +if ($Runtime -eq "dotnet") { + $assetName = ".NET Core Runtime" + $dotnetPackageRelativePath = "shared\Microsoft.NETCore.App" +} +elseif ($Runtime -eq "aspnetcore") { + $assetName = "ASP.NET Core Runtime" + $dotnetPackageRelativePath = "shared\Microsoft.AspNetCore.App" +} +elseif ($Runtime -eq "windowsdesktop") { + $assetName = ".NET Core Windows Desktop Runtime" + $dotnetPackageRelativePath = "shared\Microsoft.WindowsDesktop.App" +} +elseif (-not $Runtime) { + $assetName = ".NET Core SDK" + $dotnetPackageRelativePath = "sdk" +} +else { + throw "Invalid value for `$Runtime" +} + +# Check if the SDK version is already installed. +$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion +if ($isAssetInstalled) { + Say "$assetName version $SpecificVersion is already installed." 
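+    # Nothing to download in this case; just make sure the existing install root is on the current process PATH, then exit.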
+ Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath + exit 0 +} + +New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null + +$installDrive = $((Get-Item $InstallRoot).PSDrive.Name); +$diskInfo = Get-PSDrive -Name $installDrive +if ($diskInfo.Free / 1MB -le 100) { + Say "There is not enough disk space on drive ${installDrive}:" + exit 0 +} + +$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName()) +Say-Verbose "Zip path: $ZipPath" + +$DownloadFailed = $false +Say "Downloading link: $DownloadLink" +try { + DownloadFile -Source $DownloadLink -OutPath $ZipPath +} +catch { + Say "Cannot download: $DownloadLink" + if ($LegacyDownloadLink) { + $DownloadLink = $LegacyDownloadLink + $ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName()) + Say-Verbose "Legacy zip path: $ZipPath" + Say "Downloading legacy link: $DownloadLink" + try { + DownloadFile -Source $DownloadLink -OutPath $ZipPath + } + catch { + Say "Cannot download: $DownloadLink" + $DownloadFailed = $true + } + } + else { + $DownloadFailed = $true + } +} + +if ($DownloadFailed) { + throw "Could not find/download: `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support" +} + +Say "Extracting zip from $DownloadLink" +Extract-Dotnet-Package -ZipPath $ZipPath -OutPath $InstallRoot + +# Check if the SDK version is now installed; if not, fail the installation. +$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion +if (!$isAssetInstalled) { + throw "`"$assetName`" with version = $SpecificVersion failed to install with an unknown error." +} + +Remove-Item $ZipPath + +Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath + +Say "Installation finished" +exit 0 diff --git a/src/Misc/dotnet-install.sh b/src/Misc/dotnet-install.sh new file mode 100755 index 00000000000..61337044fdb --- /dev/null +++ b/src/Misc/dotnet-install.sh @@ -0,0 +1,1025 @@ +#!/usr/bin/env bash +# Copyright (c) .NET Foundation and contributors. All rights reserved. +# Licensed under the MIT license. See LICENSE file in the project root for full license information. +# + +# Stop script on NZEC +set -e +# Stop script if unbound variable found (use ${var:-} if intentional) +set -u +# By default cmd1 | cmd2 returns exit code of cmd2 regardless of cmd1 success +# This is causing it to fail +set -o pipefail + +# Use in the the functions: eval $invocation +invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"' + +# standard output may be used as a return value in the functions +# we need a way to write text on the screen in the functions so that +# it won't interfere with the return value. +# Exposing stream 3 as a pipe to standard output of the script itself +exec 3>&1 + +# Setup some colors to use. These need to work in fairly limited shells, like the Ubuntu Docker container where there are only 8 colors. 
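+# If stdout is not a terminal (or tput is unavailable), the color variables stay unset and the ${color:-} expansions used below expand to nothing.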
+# See if stdout is a terminal +if [ -t 1 ] && command -v tput > /dev/null; then + # see if it supports colors + ncolors=$(tput colors) + if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then + bold="$(tput bold || echo)" + normal="$(tput sgr0 || echo)" + black="$(tput setaf 0 || echo)" + red="$(tput setaf 1 || echo)" + green="$(tput setaf 2 || echo)" + yellow="$(tput setaf 3 || echo)" + blue="$(tput setaf 4 || echo)" + magenta="$(tput setaf 5 || echo)" + cyan="$(tput setaf 6 || echo)" + white="$(tput setaf 7 || echo)" + fi +fi + +say_warning() { + printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}" +} + +say_err() { + printf "%b\n" "${red:-}dotnet_install: Error: $1${normal:-}" >&2 +} + +say() { + # using stream 3 (defined in the beginning) to not interfere with stdout of functions + # which may be used as return value + printf "%b\n" "${cyan:-}dotnet-install:${normal:-} $1" >&3 +} + +say_verbose() { + if [ "$verbose" = true ]; then + say "$1" + fi +} + +# This platform list is finite - if the SDK/Runtime has supported Linux distribution-specific assets, +# then and only then should the Linux distribution appear in this list. +# Adding a Linux distribution to this list does not imply distribution-specific support. +get_legacy_os_name_from_platform() { + eval $invocation + + platform="$1" + case "$platform" in + "centos.7") + echo "centos" + return 0 + ;; + "debian.8") + echo "debian" + return 0 + ;; + "debian.9") + echo "debian.9" + return 0 + ;; + "fedora.23") + echo "fedora.23" + return 0 + ;; + "fedora.24") + echo "fedora.24" + return 0 + ;; + "fedora.27") + echo "fedora.27" + return 0 + ;; + "fedora.28") + echo "fedora.28" + return 0 + ;; + "opensuse.13.2") + echo "opensuse.13.2" + return 0 + ;; + "opensuse.42.1") + echo "opensuse.42.1" + return 0 + ;; + "opensuse.42.3") + echo "opensuse.42.3" + return 0 + ;; + "rhel.7"*) + echo "rhel" + return 0 + ;; + "ubuntu.14.04") + echo "ubuntu" + return 0 + ;; + "ubuntu.16.04") + echo "ubuntu.16.04" + return 0 + ;; + "ubuntu.16.10") + echo "ubuntu.16.10" + return 0 + ;; + "ubuntu.18.04") + echo "ubuntu.18.04" + return 0 + ;; + "alpine.3.4.3") + echo "alpine" + return 0 + ;; + esac + return 1 +} + +get_linux_platform_name() { + eval $invocation + + if [ -n "$runtime_id" ]; then + echo "${runtime_id%-*}" + return 0 + else + if [ -e /etc/os-release ]; then + . /etc/os-release + echo "$ID.$VERSION_ID" + return 0 + elif [ -e /etc/redhat-release ]; then + local redhatRelease=$( /dev/null 2>&1 + return $? +} + + +check_min_reqs() { + local hasMinimum=false + if machine_has "curl"; then + hasMinimum=true + elif machine_has "wget"; then + hasMinimum=true + fi + + if [ "$hasMinimum" = "false" ]; then + say_err "curl (recommended) or wget are required to download dotnet. Install missing prerequisite to proceed." + return 1 + fi + return 0 +} + +check_pre_reqs() { + eval $invocation + + if [ "${DOTNET_INSTALL_SKIP_PREREQS:-}" = "1" ]; then + return 0 + fi + + if [ "$(uname)" = "Linux" ]; then + if [ ! -x "$(command -v ldconfig)" ]; then + echo "ldconfig is not in PATH, trying /sbin/ldconfig." + LDCONFIG_COMMAND="/sbin/ldconfig" + else + LDCONFIG_COMMAND="ldconfig" + fi + + local librarypath=${LD_LIBRARY_PATH:-} + LDCONFIG_COMMAND="$LDCONFIG_COMMAND -NXv ${librarypath//:/ }" + + [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libunwind)" ] && say_warning "Unable to locate libunwind. Probable prerequisite missing; install libunwind." + [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libssl)" ] && say_warning "Unable to locate libssl. 
Probable prerequisite missing; install libssl." + [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libicu)" ] && say_warning "Unable to locate libicu. Probable prerequisite missing; install libicu." + [ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep -F libcurl.so)" ] && say_warning "Unable to locate libcurl. Probable prerequisite missing; install libcurl." + fi + + return 0 +} + +# args: +# input - $1 +to_lowercase() { + #eval $invocation + + echo "$1" | tr '[:upper:]' '[:lower:]' + return 0 +} + +# args: +# input - $1 +remove_trailing_slash() { + #eval $invocation + + local input="${1:-}" + echo "${input%/}" + return 0 +} + +# args: +# input - $1 +remove_beginning_slash() { + #eval $invocation + + local input="${1:-}" + echo "${input#/}" + return 0 +} + +# args: +# root_path - $1 +# child_path - $2 - this parameter can be empty +combine_paths() { + eval $invocation + + # TODO: Consider making it work with any number of paths. For now: + if [ ! -z "${3:-}" ]; then + say_err "combine_paths: Function takes two parameters." + return 1 + fi + + local root_path="$(remove_trailing_slash "$1")" + local child_path="$(remove_beginning_slash "${2:-}")" + say_verbose "combine_paths: root_path=$root_path" + say_verbose "combine_paths: child_path=$child_path" + echo "$root_path/$child_path" + return 0 +} + +get_machine_architecture() { + eval $invocation + + if command -v uname > /dev/null; then + CPUName=$(uname -m) + case $CPUName in + armv7l) + echo "arm" + return 0 + ;; + aarch64) + echo "arm64" + return 0 + ;; + esac + fi + + # Always default to 'x64' + echo "x64" + return 0 +} + +# args: +# architecture - $1 +get_normalized_architecture_from_architecture() { + eval $invocation + + local architecture="$(to_lowercase "$1")" + case "$architecture" in + \) + echo "$(get_normalized_architecture_from_architecture "$(get_machine_architecture)")" + return 0 + ;; + amd64|x64) + echo "x64" + return 0 + ;; + arm) + echo "arm" + return 0 + ;; + arm64) + echo "arm64" + return 0 + ;; + esac + + say_err "Architecture \`$architecture\` not supported. 
If you think this is a bug, report it at https://github.com/dotnet/cli/issues" + return 1 +} + +# The version text returned from the feeds is a 1-line or 2-line string: +# For the SDK and the dotnet runtime (2 lines): +# Line 1: # commit_hash +# Line 2: # 4-part version +# For the aspnetcore runtime (1 line): +# Line 1: # 4-part version + +# args: +# version_text - stdin +get_version_from_version_info() { + eval $invocation + + cat | tail -n 1 | sed 's/\r$//' + return 0 +} + +# args: +# install_root - $1 +# relative_path_to_package - $2 +# specific_version - $3 +is_dotnet_package_installed() { + eval $invocation + + local install_root="$1" + local relative_path_to_package="$2" + local specific_version="${3//[$'\t\r\n']}" + + local dotnet_package_path="$(combine_paths "$(combine_paths "$install_root" "$relative_path_to_package")" "$specific_version")" + say_verbose "is_dotnet_package_installed: dotnet_package_path=$dotnet_package_path" + + if [ -d "$dotnet_package_path" ]; then + return 0 + else + return 1 + fi +} + +# args: +# azure_feed - $1 +# channel - $2 +# normalized_architecture - $3 +# coherent - $4 +get_latest_version_info() { + eval $invocation + + local azure_feed="$1" + local channel="$2" + local normalized_architecture="$3" + local coherent="$4" + + local version_file_url=null + if [[ "$runtime" == "dotnet" ]]; then + version_file_url="$uncached_feed/Runtime/$channel/latest.version" + elif [[ "$runtime" == "aspnetcore" ]]; then + version_file_url="$uncached_feed/aspnetcore/Runtime/$channel/latest.version" + elif [ -z "$runtime" ]; then + if [ "$coherent" = true ]; then + version_file_url="$uncached_feed/Sdk/$channel/latest.coherent.version" + else + version_file_url="$uncached_feed/Sdk/$channel/latest.version" + fi + else + say_err "Invalid value for \$runtime" + return 1 + fi + say_verbose "get_latest_version_info: latest url: $version_file_url" + + download "$version_file_url" + return $? 
+} + +# args: +# azure_feed - $1 +# channel - $2 +# normalized_architecture - $3 +# version - $4 +get_specific_version_from_version() { + eval $invocation + + local azure_feed="$1" + local channel="$2" + local normalized_architecture="$3" + local version="$(to_lowercase "$4")" + + case "$version" in + latest) + local version_info + version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1 + say_verbose "get_specific_version_from_version: version_info=$version_info" + echo "$version_info" | get_version_from_version_info + return 0 + ;; + coherent) + local version_info + version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" true)" || return 1 + say_verbose "get_specific_version_from_version: version_info=$version_info" + echo "$version_info" | get_version_from_version_info + return 0 + ;; + *) + echo "$version" + return 0 + ;; + esac +} + +# args: +# azure_feed - $1 +# channel - $2 +# normalized_architecture - $3 +# specific_version - $4 +construct_download_link() { + eval $invocation + + local azure_feed="$1" + local channel="$2" + local normalized_architecture="$3" + local specific_version="${4//[$'\t\r\n']}" + + local osname + osname="$(get_current_os_name)" || return 1 + + local download_link=null + if [[ "$runtime" == "dotnet" ]]; then + download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_version-$osname-$normalized_architecture.tar.gz" + elif [[ "$runtime" == "aspnetcore" ]]; then + download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_version-$osname-$normalized_architecture.tar.gz" + elif [ -z "$runtime" ]; then + download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_version-$osname-$normalized_architecture.tar.gz" + else + return 1 + fi + + echo "$download_link" + return 0 +} + +# args: +# azure_feed - $1 +# channel - $2 +# normalized_architecture - $3 +# specific_version - $4 +construct_legacy_download_link() { + eval $invocation + + local azure_feed="$1" + local channel="$2" + local normalized_architecture="$3" + local specific_version="${4//[$'\t\r\n']}" + + local distro_specific_osname + distro_specific_osname="$(get_legacy_os_name)" || return 1 + + local legacy_download_link=null + if [[ "$runtime" == "dotnet" ]]; then + legacy_download_link="$azure_feed/Runtime/$specific_version/dotnet-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz" + elif [ -z "$runtime" ]; then + legacy_download_link="$azure_feed/Sdk/$specific_version/dotnet-dev-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz" + else + return 1 + fi + + echo "$legacy_download_link" + return 0 +} + +get_user_install_path() { + eval $invocation + + if [ ! -z "${DOTNET_INSTALL_DIR:-}" ]; then + echo "$DOTNET_INSTALL_DIR" + else + echo "$HOME/.dotnet" + fi + return 0 +} + +# args: +# install_dir - $1 +resolve_installation_path() { + eval $invocation + + local install_dir=$1 + if [ "$install_dir" = "" ]; then + local user_install_path="$(get_user_install_path)" + say_verbose "resolve_installation_path: user_install_path=$user_install_path" + echo "$user_install_path" + return 0 + fi + + echo "$install_dir" + return 0 +} + +# args: +# install_root - $1 +get_installed_version_info() { + eval $invocation + + local install_root="$1" + local version_file="$(combine_paths "$install_root" "$local_version_file_relative_path")" + say_verbose "Local version file: $version_file" + if [ ! 
-z "$version_file" ] | [ -r "$version_file" ]; then + local version_info="$(cat "$version_file")" + echo "$version_info" + return 0 + fi + + say_verbose "Local version file not found." + return 0 +} + +# args: +# relative_or_absolute_path - $1 +get_absolute_path() { + eval $invocation + + local relative_or_absolute_path=$1 + echo "$(cd "$(dirname "$1")" && pwd -P)/$(basename "$1")" + return 0 +} + +# args: +# input_files - stdin +# root_path - $1 +# out_path - $2 +# override - $3 +copy_files_or_dirs_from_list() { + eval $invocation + + local root_path="$(remove_trailing_slash "$1")" + local out_path="$(remove_trailing_slash "$2")" + local override="$3" + local osname="$(get_current_os_name)" + local override_switch=$( + if [ "$override" = false ]; then + if [[ "$osname" == "linux-musl" ]]; then + printf -- "-u"; + else + printf -- "-n"; + fi + fi) + + cat | uniq | while read -r file_path; do + local path="$(remove_beginning_slash "${file_path#$root_path}")" + local target="$out_path/$path" + if [ "$override" = true ] || (! ([ -d "$target" ] || [ -e "$target" ])); then + mkdir -p "$out_path/$(dirname "$path")" + if [ -d "$target" ]; then + rm -rf "$target" + fi + cp -R $override_switch "$root_path/$path" "$target" + fi + done +} + +# args: +# zip_path - $1 +# out_path - $2 +extract_dotnet_package() { + eval $invocation + + local zip_path="$1" + local out_path="$2" + + local temp_out_path="$(mktemp -d "$temporary_file_template")" + + local failed=false + tar -xzf "$zip_path" -C "$temp_out_path" > /dev/null || failed=true + + local folders_with_version_regex='^.*/[0-9]+\.[0-9]+[^/]+/' + find "$temp_out_path" -type f | grep -Eo "$folders_with_version_regex" | sort | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" false + find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files" + + rm -rf "$temp_out_path" + + if [ "$failed" = true ]; then + say_err "Extraction failed" + return 1 + fi +} + +# args: +# remote_path - $1 +# [out_path] - $2 - stdout if not provided +download() { + eval $invocation + + local remote_path="$1" + local out_path="${2:-}" + + if [[ "$remote_path" != "http"* ]]; then + cp "$remote_path" "$out_path" + return $? 
+ fi + + local failed=false + if machine_has "curl"; then + downloadcurl "$remote_path" "$out_path" || failed=true + elif machine_has "wget"; then + downloadwget "$remote_path" "$out_path" || failed=true + else + failed=true + fi + if [ "$failed" = true ]; then + say_verbose "Download failed: $remote_path" + return 1 + fi + return 0 +} + +downloadcurl() { + eval $invocation + local remote_path="$1" + local out_path="${2:-}" + + # Append feed_credential as late as possible before calling curl to avoid logging feed_credential + remote_path="${remote_path}${feed_credential}" + + local failed=false + if [ -z "$out_path" ]; then + curl --retry 10 -sSL -f --create-dirs "$remote_path" || failed=true + else + curl --retry 10 -sSL -f --create-dirs -o "$out_path" "$remote_path" || failed=true + fi + if [ "$failed" = true ]; then + say_verbose "Curl download failed" + return 1 + fi + return 0 +} + +downloadwget() { + eval $invocation + local remote_path="$1" + local out_path="${2:-}" + + # Append feed_credential as late as possible before calling wget to avoid logging feed_credential + remote_path="${remote_path}${feed_credential}" + + local failed=false + if [ -z "$out_path" ]; then + wget -q --tries 10 -O - "$remote_path" || failed=true + else + wget --tries 10 -O "$out_path" "$remote_path" || failed=true + fi + if [ "$failed" = true ]; then + say_verbose "Wget download failed" + return 1 + fi + return 0 +} + +calculate_vars() { + eval $invocation + valid_legacy_download_link=true + + normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")" + say_verbose "normalized_architecture=$normalized_architecture" + + specific_version="$(get_specific_version_from_version "$azure_feed" "$channel" "$normalized_architecture" "$version")" + say_verbose "specific_version=$specific_version" + if [ -z "$specific_version" ]; then + say_err "Could not resolve version information." + return 1 + fi + + download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")" + say_verbose "Constructed primary named payload URL: $download_link" + + legacy_download_link="$(construct_legacy_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false + + if [ "$valid_legacy_download_link" = true ]; then + say_verbose "Constructed legacy named payload URL: $legacy_download_link" + else + say_verbose "Cound not construct a legacy_download_link; omitting..." + fi + + install_root="$(resolve_installation_path "$install_dir")" + say_verbose "InstallRoot: $install_root" +} + +install_dotnet() { + eval $invocation + local download_failed=false + local asset_name='' + local asset_relative_path='' + + if [[ "$runtime" == "dotnet" ]]; then + asset_relative_path="shared/Microsoft.NETCore.App" + asset_name=".NET Core Runtime" + elif [[ "$runtime" == "aspnetcore" ]]; then + asset_relative_path="shared/Microsoft.AspNetCore.App" + asset_name="ASP.NET Core Runtime" + elif [ -z "$runtime" ]; then + asset_relative_path="sdk" + asset_name=".NET Core SDK" + else + say_err "Invalid value for \$runtime" + return 1 + fi + + # Check if the SDK version is already installed. + if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_version"; then + say "$asset_name version $specific_version is already installed." 
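+        # Nothing more to do; skip the download and extraction steps.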
+ return 0 + fi + + mkdir -p "$install_root" + zip_path="$(mktemp "$temporary_file_template")" + say_verbose "Zip path: $zip_path" + + say "Downloading link: $download_link" + + # Failures are normal in the non-legacy case for ultimately legacy downloads. + # Do not output to stderr, since output to stderr is considered an error. + download "$download_link" "$zip_path" 2>&1 || download_failed=true + + # if the download fails, download the legacy_download_link + if [ "$download_failed" = true ]; then + say "Cannot download: $download_link" + + if [ "$valid_legacy_download_link" = true ]; then + download_failed=false + download_link="$legacy_download_link" + zip_path="$(mktemp "$temporary_file_template")" + say_verbose "Legacy zip path: $zip_path" + say "Downloading legacy link: $download_link" + download "$download_link" "$zip_path" 2>&1 || download_failed=true + + if [ "$download_failed" = true ]; then + say "Cannot download: $download_link" + fi + fi + fi + + if [ "$download_failed" = true ]; then + say_err "Could not find/download: \`$asset_name\` with version = $specific_version" + say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support" + return 1 + fi + + say "Extracting zip from $download_link" + extract_dotnet_package "$zip_path" "$install_root" + + # Check if the SDK version is now installed; if not, fail the installation. + if ! is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_version"; then + say_err "\`$asset_name\` with version = $specific_version failed to install with an unknown error." + return 1 + fi + + return 0 +} + +args=("$@") + +local_version_file_relative_path="/.version" +bin_folder_relative_path="" +temporary_file_template="${TMPDIR:-/tmp}/dotnet.XXXXXXXXX" + +channel="LTS" +version="Latest" +install_dir="" +architecture="" +dry_run=false +no_path=false +no_cdn=false +azure_feed="https://dotnetcli.azureedge.net/dotnet" +uncached_feed="https://dotnetcli.blob.core.windows.net/dotnet" +feed_credential="" +verbose=false +runtime="" +runtime_id="" +override_non_versioned_files=true +non_dynamic_parameters="" + +while [ $# -ne 0 ] +do + name="$1" + case "$name" in + -c|--channel|-[Cc]hannel) + shift + channel="$1" + ;; + -v|--version|-[Vv]ersion) + shift + version="$1" + ;; + -i|--install-dir|-[Ii]nstall[Dd]ir) + shift + install_dir="$1" + ;; + --arch|--architecture|-[Aa]rch|-[Aa]rchitecture) + shift + architecture="$1" + ;; + --shared-runtime|-[Ss]hared[Rr]untime) + say_warning "The --shared-runtime flag is obsolete and may be removed in a future version of this script. The recommended usage is to specify '--runtime dotnet'." + if [ -z "$runtime" ]; then + runtime="dotnet" + fi + ;; + --runtime|-[Rr]untime) + shift + runtime="$1" + if [[ "$runtime" != "dotnet" ]] && [[ "$runtime" != "aspnetcore" ]]; then + say_err "Unsupported value for --runtime: '$1'. Valid values are 'dotnet' and 'aspnetcore'." + if [[ "$runtime" == "windowsdesktop" ]]; then + say_err "WindowsDesktop archives are manufactured for Windows platforms only." 
+ fi + exit 1 + fi + ;; + --dry-run|-[Dd]ry[Rr]un) + dry_run=true + ;; + --no-path|-[Nn]o[Pp]ath) + no_path=true + non_dynamic_parameters+=" $name" + ;; + --verbose|-[Vv]erbose) + verbose=true + non_dynamic_parameters+=" $name" + ;; + --no-cdn|-[Nn]o[Cc]dn) + no_cdn=true + non_dynamic_parameters+=" $name" + ;; + --azure-feed|-[Aa]zure[Ff]eed) + shift + azure_feed="$1" + non_dynamic_parameters+=" $name "\""$1"\""" + ;; + --uncached-feed|-[Uu]ncached[Ff]eed) + shift + uncached_feed="$1" + non_dynamic_parameters+=" $name "\""$1"\""" + ;; + --feed-credential|-[Ff]eed[Cc]redential) + shift + feed_credential="$1" + non_dynamic_parameters+=" $name "\""$1"\""" + ;; + --runtime-id|-[Rr]untime[Ii]d) + shift + runtime_id="$1" + non_dynamic_parameters+=" $name "\""$1"\""" + ;; + --skip-non-versioned-files|-[Ss]kip[Nn]on[Vv]ersioned[Ff]iles) + override_non_versioned_files=false + non_dynamic_parameters+=" $name" + ;; + -?|--?|-h|--help|-[Hh]elp) + script_name="$(basename "$0")" + echo ".NET Tools Installer" + echo "Usage: $script_name [-c|--channel ] [-v|--version ] [-p|--prefix ]" + echo " $script_name -h|-?|--help" + echo "" + echo "$script_name is a simple command line interface for obtaining dotnet cli." + echo "" + echo "Options:" + echo " -c,--channel Download from the channel specified, Defaults to \`$channel\`." + echo " -Channel" + echo " Possible values:" + echo " - Current - most current release" + echo " - LTS - most current supported release" + echo " - 2-part version in a format A.B - represents a specific release" + echo " examples: 2.0; 1.0" + echo " - Branch name" + echo " examples: release/2.0.0; Master" + echo " Note: The version parameter overrides the channel parameter." + echo " -v,--version Use specific VERSION, Defaults to \`$version\`." + echo " -Version" + echo " Possible values:" + echo " - latest - most latest build on specific channel" + echo " - coherent - most latest coherent build on specific channel" + echo " coherent applies only to SDK downloads" + echo " - 3-part version in a format A.B.C - represents specific version of build" + echo " examples: 2.0.0-preview2-006120; 1.1.0" + echo " -i,--install-dir Install under specified location (see Install Location below)" + echo " -InstallDir" + echo " --architecture Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`." + echo " --arch,-Architecture,-Arch" + echo " Possible values: x64, arm, and arm64" + echo " --runtime Installs a shared runtime only, without the SDK." + echo " -Runtime" + echo " Possible values:" + echo " - dotnet - the Microsoft.NETCore.App shared runtime" + echo " - aspnetcore - the Microsoft.AspNetCore.App shared runtime" + echo " --skip-non-versioned-files Skips non-versioned files if they already exist, such as the dotnet executable." + echo " -SkipNonVersionedFiles" + echo " --dry-run,-DryRun Do not perform installation. Display download link." + echo " --no-path, -NoPath Do not set PATH for the current process." + echo " --verbose,-Verbose Display diagnostics information." + echo " --azure-feed,-AzureFeed Azure feed location. Defaults to $azure_feed, This parameter typically is not changed by the user." + echo " --uncached-feed,-UncachedFeed Uncached feed location. This parameter typically is not changed by the user." + echo " --no-cdn,-NoCdn Disable downloading from the Azure CDN, and use the uncached feed directly." + echo " --feed-credential,-FeedCredential Azure feed shared access token. This parameter typically is not specified." 
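One idiom in the parser above that is easy to misread is how value-taking options are appended to `non_dynamic_parameters`: the `" $name "\""$1"\"""` concatenation emits the option name followed by its value wrapped in literal double quotes, so the "repeatable invocation" printed by `--dry-run` can be pasted straight back into a shell. A tiny demonstration with a hypothetical feed URL:

```bash
#!/bin/bash
# Demonstrates the quoting idiom used when accumulating non_dynamic_parameters above.
name="--azure-feed"
value="https://example.invalid/dotnet/feed"    # hypothetical value

params=""
params+=" $name "\""$value"\"""

# -> ' --azure-feed "https://example.invalid/dotnet/feed"'  (note the leading space)
echo "$params"
```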
+ echo " --runtime-id Installs the .NET Tools for the given platform (use linux-x64 for portable linux)." + echo " -RuntimeId" + echo " -?,--?,-h,--help,-Help Shows this help message" + echo "" + echo "Obsolete parameters:" + echo " --shared-runtime The recommended alternative is '--runtime dotnet'." + echo " -SharedRuntime Installs just the shared runtime bits, not the entire SDK." + echo "" + echo "Install Location:" + echo " Location is chosen in following order:" + echo " - --install-dir option" + echo " - Environmental variable DOTNET_INSTALL_DIR" + echo " - $HOME/.dotnet" + exit 0 + ;; + *) + say_err "Unknown argument \`$name\`" + exit 1 + ;; + esac + + shift +done + +if [ "$no_cdn" = true ]; then + azure_feed="$uncached_feed" +fi + +check_min_reqs +calculate_vars +script_name=$(basename "$0") + +if [ "$dry_run" = true ]; then + say "Payload URLs:" + say "Primary named payload URL: $download_link" + if [ "$valid_legacy_download_link" = true ]; then + say "Legacy named payload URL: $legacy_download_link" + fi + repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\""" + if [[ "$runtime" == "dotnet" ]]; then + repeatable_command+=" --runtime "\""dotnet"\""" + elif [[ "$runtime" == "aspnetcore" ]]; then + repeatable_command+=" --runtime "\""aspnetcore"\""" + fi + repeatable_command+="$non_dynamic_parameters" + say "Repeatable invocation: $repeatable_command" + exit 0 +fi + +check_pre_reqs +install_dotnet + +bin_path="$(get_absolute_path "$(combine_paths "$install_root" "$bin_folder_relative_path")")" +if [ "$no_path" = false ]; then + say "Adding to current process PATH: \`$bin_path\`. Note: This change will be visible only when sourcing script." + export PATH="$bin_path":"$PATH" +else + say "Binaries of dotnet can be found in $bin_path" +fi + +say "Installation finished successfully." diff --git a/src/Misc/externals.sh b/src/Misc/externals.sh new file mode 100755 index 00000000000..0f37cc254ba --- /dev/null +++ b/src/Misc/externals.sh @@ -0,0 +1,148 @@ +#!/bin/bash +PACKAGERUNTIME=$1 +PRECACHE=$2 + +NODE_URL=https://nodejs.org/dist +NODE12_VERSION="12.4.0" + +get_abs_path() { + # exploits the fact that pwd will print abs path when no args + echo "$(cd "$(dirname "$1")" && pwd)/$(basename "$1")" +} + +LAYOUT_DIR=$(get_abs_path "$(dirname $0)/../../_layout") +DOWNLOAD_DIR="$(get_abs_path "$(dirname $0)/../../_downloads")/netcore2x" + +function failed() { + local error=${1:-Undefined error} + echo "Failed: $error" >&2 + exit 1 +} + +function checkRC() { + local rc=$? + if [ $rc -ne 0 ]; then + failed "${1} failed with return code $rc" + fi +} + +function acquireExternalTool() { + local download_source=$1 # E.g. https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe + local target_dir="$LAYOUT_DIR/externals/$2" # E.g. $LAYOUT_DIR/externals/vswhere + local fix_nested_dir=$3 # Flag that indicates whether to move nested contents up one directory. + + # Extract the portion of the URL after the protocol. E.g. github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe + local relative_url="${download_source#*://}" + + # Check if the download already exists. 
+ local download_target="$DOWNLOAD_DIR/$relative_url" + local download_basename="$(basename "$download_target")" + local download_dir="$(dirname "$download_target")" + + if [[ "$PRECACHE" != "" ]]; then + if [ -f "$download_target" ]; then + echo "Download exists: $download_basename" + else + # Delete any previous partial file. + local partial_target="$DOWNLOAD_DIR/partial/$download_basename" + mkdir -p "$(dirname "$partial_target")" || checkRC 'mkdir' + if [ -f "$partial_target" ]; then + rm "$partial_target" || checkRC 'rm' + fi + + # Download from source to the partial file. + echo "Downloading $download_source" + mkdir -p "$(dirname "$download_target")" || checkRC 'mkdir' + # curl -f Fail silently (no output at all) on HTTP errors (H) + # -k Allow connections to SSL sites without certs (H) + # -S Show error. With -s, make curl show errors when they occur + # -L Follow redirects (H) + # -o FILE Write to FILE instead of stdout + curl -fkSL -o "$partial_target" "$download_source" 2>"${download_target}_download.log" || checkRC 'curl' + + # Move the partial file to the download target. + mv "$partial_target" "$download_target" || checkRC 'mv' + + # Extract to current directory + # Ensure we can extract those files + # We might use them during dev.sh + if [[ "$download_basename" == *.zip ]]; then + # Extract the zip. + echo "Testing zip" + unzip "$download_target" -d "$download_dir" > /dev/null + local rc=$? + if [[ $rc -ne 0 && $rc -ne 1 ]]; then + failed "unzip failed with return code $rc" + fi + elif [[ "$download_basename" == *.tar.gz ]]; then + # Extract the tar gz. + echo "Testing tar gz" + tar xzf "$download_target" -C "$download_dir" > /dev/null || checkRC 'tar' + fi + fi + else + # Extract to layout. + mkdir -p "$target_dir" || checkRC 'mkdir' + local nested_dir="" + if [[ "$download_basename" == *.zip ]]; then + # Extract the zip. + echo "Extracting zip to layout" + unzip "$download_target" -d "$target_dir" > /dev/null + local rc=$? + if [[ $rc -ne 0 && $rc -ne 1 ]]; then + failed "unzip failed with return code $rc" + fi + + # Capture the nested directory path if the fix_nested_dir flag is set. + if [[ "$fix_nested_dir" == "fix_nested_dir" ]]; then + nested_dir="${download_basename%.zip}" # Remove the trailing ".zip". + fi + elif [[ "$download_basename" == *.tar.gz ]]; then + # Extract the tar gz. + echo "Extracting tar gz to layout" + tar xzf "$download_target" -C "$target_dir" > /dev/null || checkRC 'tar' + + # Capture the nested directory path if the fix_nested_dir flag is set. + if [[ "$fix_nested_dir" == "fix_nested_dir" ]]; then + nested_dir="${download_basename%.tar.gz}" # Remove the trailing ".tar.gz". + fi + else + # Copy the file. + echo "Copying to layout" + cp "$download_target" "$target_dir/" || checkRC 'cp' + fi + + # Fixup the nested directory. + if [[ "$nested_dir" != "" ]]; then + if [ -d "$target_dir/$nested_dir" ]; then + mv "$target_dir/$nested_dir"/* "$target_dir/" || checkRC 'mv' + rmdir "$target_dir/$nested_dir" || checkRC 'rmdir' + fi + fi + fi +} + +# Download the external tools only for Windows. +if [[ "$PACKAGERUNTIME" == "win-x64" ]]; then + acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/win-x64/node.exe" node12/bin + acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/win-x64/node.lib" node12/bin + if [[ "$PRECACHE" != "" ]]; then + acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere + fi +fi + +# Download the external tools only for OSX. 
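When the `fix_nested_dir` flag is passed, the Node tarballs referenced above extract with a single top-level directory (for example `node-v12.4.0-linux-x64/`) inside the target, and the function then hoists that directory's contents up one level. A condensed sketch of that hoisting step, assuming the nested directory name is already known (both paths below are hypothetical):

```bash
#!/bin/bash
# Sketch of the fix_nested_dir hoisting performed above (paths are hypothetical).
target_dir="/tmp/externals/node12"
nested_dir="node-v12.4.0-linux-x64"

if [ -d "$target_dir/$nested_dir" ]; then
    # Move the tarball's single top-level directory's contents up one level,
    # then remove the now-empty directory.
    mv "$target_dir/$nested_dir"/* "$target_dir/" || exit 1
    rmdir "$target_dir/$nested_dir" || exit 1
fi
```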
+if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then + acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-darwin-x64.tar.gz" node12 fix_nested_dir +fi + +# Download the external tools common across Linux PACKAGERUNTIMEs (excluding OSX). +if [[ "$PACKAGERUNTIME" == "linux-x64" || "$PACKAGERUNTIME" == "rhel.6-x64" ]]; then + acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir + # TODO: Repath this blob to use a consistent version format (_ vs .) + acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/12_4_0/alpine/node-v${NODE12_VERSION}-alpine.tar.gz" node12_alpine +fi + +if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then + acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-armv7l.tar.gz" node12 fix_nested_dir +fi diff --git a/src/Misc/layoutbin/RunnerService.js b/src/Misc/layoutbin/RunnerService.js new file mode 100644 index 00000000000..9af34397e29 --- /dev/null +++ b/src/Misc/layoutbin/RunnerService.js @@ -0,0 +1,91 @@ +#!/usr/bin/env node +// Copyright (c) GitHub. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. + +var childProcess = require("child_process"); +var path = require("path") + +var supported = ['linux', 'darwin'] + +if (supported.indexOf(process.platform) == -1) { + console.log('Unsupported platform: ' + process.platform); + console.log('Supported platforms are: ' + supported.toString()); + process.exit(1); +} + +var stopping = false; +var listener = null; + +var runService = function() { + var listenerExePath = path.join(__dirname, '../bin/Runner.Listener'); + var interactive = process.argv[2] === "interactive"; + + if(!stopping) { + try { + if (interactive) { + console.log('Starting Runner listener interactively'); + listener = childProcess.spawn(listenerExePath, ['run'], { env: process.env }); + } else { + console.log('Starting Runner listener with startup type: service'); + listener = childProcess.spawn(listenerExePath, ['run', '--startuptype', 'service'], { env: process.env }); + } + + console.log('Started listener process'); + + listener.stdout.on('data', (data) => { + process.stdout.write(data.toString('utf8')); + }); + + listener.stderr.on('data', (data) => { + process.stdout.write(data.toString('utf8')); + }); + + listener.on('close', (code) => { + console.log(`Runner listener exited with error code ${code}`); + + if (code === 0) { + console.log('Runner listener exit with 0 return code, stop the service, no retry needed.'); + stopping = true; + } else if (code === 1) { + console.log('Runner listener exit with terminated error, stop the service, no retry needed.'); + stopping = true; + } else if (code === 2) { + console.log('Runner listener exit with retryable error, re-launch runner in 5 seconds.'); + } else if (code === 3) { + console.log('Runner listener exit because of updating, re-launch runner in 5 seconds.'); + } else { + console.log('Runner listener exit with undefined return code, re-launch runner in 5 seconds.'); + } + + if(!stopping) { + setTimeout(runService, 5000); + } + }); + + } catch(ex) { + console.log(ex); + } + } +} + +runService(); +console.log('Started running service'); + +var gracefulShutdown = function(code) { + console.log('Shutting down runner listener'); + stopping = true; + if (listener) { + console.log('Sending SIGINT to runner listener to stop'); + listener.kill('SIGINT'); + + // TODO wait for 30 seconds and send a SIGKILL + } +} + 
+process.on('SIGINT', () => { + gracefulShutdown(0); +}); + +process.on('SIGTERM', () => { + gracefulShutdown(0); +}); diff --git a/src/Misc/layoutbin/actions.runner.plist.template b/src/Misc/layoutbin/actions.runner.plist.template new file mode 100644 index 00000000000..351c1100142 --- /dev/null +++ b/src/Misc/layoutbin/actions.runner.plist.template @@ -0,0 +1,27 @@ + + + + + Label + {{SvcName}} + ProgramArguments + + {{RunnerRoot}}/runsvc.sh + + UserName + {{User}} + WorkingDirectory + {{RunnerRoot}} + RunAtLoad + + StandardOutPath + {{UserHome}}/Library/Logs/{{SvcName}}/stdout.log + StandardErrorPath + {{UserHome}}/Library/Logs/{{SvcName}}/stderr.log + EnvironmentVariables + + ACTIONS_RUNNER_SVC + 1 + + + diff --git a/src/Misc/layoutbin/actions.runner.service.template b/src/Misc/layoutbin/actions.runner.service.template new file mode 100644 index 00000000000..4dcec7e368c --- /dev/null +++ b/src/Misc/layoutbin/actions.runner.service.template @@ -0,0 +1,14 @@ +[Unit] +Description={{Description}} +After=network.target + +[Service] +ExecStart={{RunnerRoot}}/runsvc.sh +User={{User}} +WorkingDirectory={{RunnerRoot}} +KillMode=process +KillSignal=SIGTERM +TimeoutStopSec=5min + +[Install] +WantedBy=multi-user.target diff --git a/src/Misc/layoutbin/darwin.svc.sh.template b/src/Misc/layoutbin/darwin.svc.sh.template new file mode 100644 index 00000000000..5210eb94d81 --- /dev/null +++ b/src/Misc/layoutbin/darwin.svc.sh.template @@ -0,0 +1,135 @@ +#!/bin/bash + +SVC_NAME="{{SvcNameVar}}" +SVC_DESCRIPTION="{{SvcDescription}}" + +user_id=`id -u` + +# launchctl should not run as sudo for launch runners +if [ $user_id -eq 0 ]; then + echo "Must not run with sudo" + exit 1 +fi + +SVC_CMD=$1 +RUNNER_ROOT=`pwd` + +LAUNCH_PATH="${HOME}/Library/LaunchAgents" +PLIST_PATH="${LAUNCH_PATH}/${SVC_NAME}.plist" +TEMPLATE_PATH=./bin/actions.runner.plist.template +TEMP_PATH=./bin/actions.runner.plist.temp +CONFIG_PATH=.service + +function failed() +{ + local error=${1:-Undefined error} + echo "Failed: $error" >&2 + exit 1 +} + +if [ ! -f "${TEMPLATE_PATH}" ]; then + failed "Must run from runner root or install is corrupt" +fi + +function install() +{ + echo "Creating launch runner in ${PLIST_PATH}" + + if [ ! -d "${LAUNCH_PATH}" ]; then + mkdir ${LAUNCH_PATH} + fi + + if [ -f "${PLIST_PATH}" ]; then + failed "error: exists ${PLIST_PATH}" + fi + + if [ -f "${TEMP_PATH}" ]; then + rm "${TEMP_PATH}" || failed "failed to delete ${TEMP_PATH}" + fi + + log_path="${HOME}/Library/Logs/${SVC_NAME}" + echo "Creating ${log_path}" + mkdir -p "${log_path}" || failed "failed to create ${log_path}" + + echo Creating ${PLIST_PATH} + sed "s/{{User}}/${SUDO_USER:-$USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file" + mv "${TEMP_PATH}" "${PLIST_PATH}" || failed "failed to copy plist" + + # Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user. 
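The darwin service installer above turns `actions.runner.plist.template` into a per-user LaunchAgent by substituting the `{{User}}`, `{{SvcName}}`, `{{RunnerRoot}}` and `{{UserHome}}` tokens with sed and writing the result into `~/Library/LaunchAgents`. A stripped-down sketch of that substitution and load step, with a hypothetical service name and runner root:

```bash
#!/bin/bash
# Sketch of the plist template substitution done by install() above (values are hypothetical).
SVC_NAME="actions.runner.example.runner01"
RUNNER_ROOT="$HOME/actions-runner"
TEMPLATE_PATH="./bin/actions.runner.plist.template"
PLIST_PATH="$HOME/Library/LaunchAgents/${SVC_NAME}.plist"

mkdir -p "$HOME/Library/LaunchAgents" "$HOME/Library/Logs/$SVC_NAME"

sed "s/{{User}}/$USER/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@$RUNNER_ROOT@g; s@{{UserHome}}@$HOME@g;" \
    "$TEMPLATE_PATH" > "$PLIST_PATH"

# Register and start the agent for the current user.
launchctl load -w "$PLIST_PATH"
```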
+ echo Creating runsvc.sh + cp ./bin/runsvc.sh ./runsvc.sh || failed "failed to copy runsvc.sh" + chmod u+x ./runsvc.sh || failed "failed to set permission for runsvc.sh" + + echo Creating ${CONFIG_PATH} + echo "${PLIST_PATH}" > ${CONFIG_PATH} || failed "failed to create .Service file" + + echo "svc install complete" +} + +function start() +{ + echo "starting ${SVC_NAME}" + launchctl load -w "${PLIST_PATH}" || failed "failed to load ${PLIST_PATH}" + status +} + +function stop() +{ + echo "stopping ${SVC_NAME}" + launchctl unload "${PLIST_PATH}" || failed "failed to unload ${PLIST_PATH}" + status +} + +function uninstall() +{ + echo "uninstalling ${SVC_NAME}" + stop + rm "${PLIST_PATH}" || failed "failed to delete ${PLIST_PATH}" + if [ -f "${CONFIG_PATH}" ]; then + rm "${CONFIG_PATH}" || failed "failed to delete ${CONFIG_PATH}" + fi +} + +function status() +{ + echo "status ${SVC_NAME}:" + if [ -f "${PLIST_PATH}" ]; then + echo + echo "${PLIST_PATH}" + else + echo + echo "not installed" + echo + return + fi + + echo + status_out=`launchctl list | grep "${SVC_NAME}"` + if [ ! -z "$status_out" ]; then + echo Started: + echo $status_out + echo + else + echo Stopped + echo + fi +} + +function usage() +{ + echo + echo Usage: + echo "./svc.sh [install, start, stop, status, uninstall]" + echo +} + +case $SVC_CMD in + "install") install;; + "status") status;; + "uninstall") uninstall;; + "start") start;; + "stop") stop;; + *) usage;; +esac + +exit 0 diff --git a/src/Misc/layoutbin/installdependencies.sh b/src/Misc/layoutbin/installdependencies.sh new file mode 100755 index 00000000000..0375c0a43f2 --- /dev/null +++ b/src/Misc/layoutbin/installdependencies.sh @@ -0,0 +1,298 @@ +#!/bin/bash + +user_id=`id -u` + +if [ $user_id -ne 0 ]; then + echo "Need to run with sudo privilege" + exit 1 +fi + +# Determine OS type +# Debian based OS (Debian, Ubuntu, Linux Mint) has /etc/debian_version +# Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7) has /etc/redhat-release +# SUSE based OS (OpenSUSE, SUSE Enterprise) has ID_LIKE=suse in /etc/os-release + +function print_errormessage() +{ + echo "Can't install dotnet core dependencies." + echo "You can manually install all required dependencies based on following documentation" + echo "https://docs.microsoft.com/en-us/dotnet/core/linux-prerequisites?tabs=netcore2x" +} + +function print_rhel6message() +{ + echo "We did our best effort to install dotnet core dependencies" + echo "However, there are some dependencies which require manual installation" + echo "You can install all remaining required dependencies based on the following documentation" + echo "https://github.com/dotnet/core/blob/master/Documentation/build-and-install-rhel6-prerequisites.md" +} + +function print_rhel6errormessage() +{ + echo "We couldn't install dotnet core dependencies" + echo "You can manually install all required dependencies based on following documentation" + echo "https://docs.microsoft.com/en-us/dotnet/core/linux-prerequisites?tabs=netcore2x" + echo "In addition, there are some dependencies which require manual installation. 
Please follow this documentation" + echo "https://github.com/dotnet/core/blob/master/Documentation/build-and-install-rhel6-prerequisites.md" +} + +if [ -e /etc/os-release ] +then + echo "--------OS Information--------" + cat /etc/os-release + echo "------------------------------" + + if [ -e /etc/debian_version ] + then + echo "The current OS is Debian based" + echo "--------Debian Version--------" + cat /etc/debian_version + echo "------------------------------" + + # prefer apt over apt-get + command -v apt + if [ $? -eq 0 ] + then + apt update && apt install -y liblttng-ust0 libkrb5-3 zlib1g + if [ $? -ne 0 ] + then + echo "'apt' failed with exit code '$?'" + print_errormessage + exit 1 + fi + + # ubuntu 18 uses libcurl4 + # ubuntu 14, 16 and other linux use libcurl3 + apt install -y libcurl3 || apt install -y libcurl4 + if [ $? -ne 0 ] + then + echo "'apt' failed with exit code '$?'" + print_errormessage + exit 1 + fi + + # debian 9 use libssl1.0.2 + # other debian linux use libssl1.0.0 + apt install -y libssl1.0.0 || apt install -y libssl1.0.2 + if [ $? -ne 0 ] + then + echo "'apt' failed with exit code '$?'" + print_errormessage + exit 1 + fi + + # libicu version prefer: libicu52 -> libicu55 -> libicu57 -> libicu60 + apt install -y libicu52 || apt install -y libicu55 || apt install -y libicu57 || apt install -y libicu60 + if [ $? -ne 0 ] + then + echo "'apt' failed with exit code '$?'" + print_errormessage + exit 1 + fi + else + command -v apt-get + if [ $? -eq 0 ] + then + apt-get update && apt-get install -y liblttng-ust0 libkrb5-3 zlib1g + if [ $? -ne 0 ] + then + echo "'apt-get' failed with exit code '$?'" + print_errormessage + exit 1 + fi + + # ubuntu 18 uses libcurl4 + # ubuntu 14, 16 and other linux use libcurl3 + apt-get install -y libcurl3 || apt-get install -y libcurl4 + if [ $? -ne 0 ] + then + echo "'apt-get' failed with exit code '$?'" + print_errormessage + exit 1 + fi + + # debian 9 use libssl1.0.2 + # other debian linux use libssl1.0.0 + apt-get install -y libssl1.0.0 || apt install -y libssl1.0.2 + if [ $? -ne 0 ] + then + echo "'apt-get' failed with exit code '$?'" + print_errormessage + exit 1 + fi + + # libicu version prefer: libicu52 -> libicu55 -> libicu57 -> libicu60 + apt-get install -y libicu52 || apt install -y libicu55 || apt install -y libicu57 || apt install -y libicu60 + if [ $? -ne 0 ] + then + echo "'apt-get' failed with exit code '$?'" + print_errormessage + exit 1 + fi + else + echo "Can not find 'apt' or 'apt-get'" + print_errormessage + exit 1 + fi + fi + elif [ -e /etc/redhat-release ] + then + echo "The current OS is Fedora based" + echo "--------Redhat Version--------" + cat /etc/redhat-release + echo "------------------------------" + + # use dnf on fedora + # use yum on centos and redhat + if [ -e /etc/fedora-release ] + then + command -v dnf + if [ $? -eq 0 ] + then + useCompatSsl=0 + grep -i 'fedora release 28' /etc/fedora-release + if [ $? -eq 0 ] + then + useCompatSsl=1 + else + grep -i 'fedora release 27' /etc/fedora-release + if [ $? -eq 0 ] + then + useCompatSsl=1 + else + grep -i 'fedora release 26' /etc/fedora-release + if [ $? -eq 0 ] + then + useCompatSsl=1 + fi + fi + fi + + if [ $useCompatSsl -eq 1 ] + then + echo "Use compat-openssl10-devel instead of openssl-devel for Fedora 27/28 (dotnet core requires openssl 1.0.x)" + dnf install -y compat-openssl10 + if [ $? -ne 0 ] + then + echo "'dnf' failed with exit code '$?'" + print_errormessage + exit 1 + fi + else + dnf install -y openssl-libs + if [ $? 
-ne 0 ] + then + echo "'dnf' failed with exit code '$?'" + print_errormessage + exit 1 + fi + fi + + dnf install -y lttng-ust libcurl krb5-libs zlib libicu + if [ $? -ne 0 ] + then + echo "'dnf' failed with exit code '$?'" + print_errormessage + exit 1 + fi + else + echo "Can not find 'dnf'" + print_errormessage + exit 1 + fi + else + command -v yum + if [ $? -eq 0 ] + then + yum install -y openssl-libs libcurl krb5-libs zlib libicu + if [ $? -ne 0 ] + then + echo "'yum' failed with exit code '$?'" + print_errormessage + exit 1 + fi + + # install lttng-ust separately since it's not part of offical package repository + yum install -y wget && wget -P /etc/yum.repos.d/ https://packages.efficios.com/repo.files/EfficiOS-RHEL7-x86-64.repo && rpmkeys --import https://packages.efficios.com/rhel/repo.key && yum updateinfo && yum install -y lttng-ust + if [ $? -ne 0 ] + then + echo "'lttng-ust' installation failed with exit code '$?'" + print_errormessage + exit 1 + fi + else + echo "Can not find 'yum'" + print_errormessage + exit 1 + fi + fi + else + # we might on OpenSUSE + OSTYPE=$(grep ID_LIKE /etc/os-release | cut -f2 -d=) + echo $OSTYPE + if [ $OSTYPE == '"suse"' ] + then + echo "The current OS is SUSE based" + command -v zypper + if [ $? -eq 0 ] + then + zypper -n install lttng-ust libopenssl1_0_0 libcurl4 krb5 zlib libicu52_1 + if [ $? -ne 0 ] + then + echo "'zypper' failed with exit code '$?'" + print_errormessage + exit 1 + fi + else + echo "Can not find 'zypper'" + print_errormessage + exit 1 + fi + else + echo "Can't detect current OS type based on /etc/os-release." + print_errormessage + exit 1 + fi + fi +elif [ -e /etc/redhat-release ] +# RHEL6 doesn't have an os-release file defined, read redhat-release instead +then + redhatRelease=$(&2 + exit 1 +} + +if [ ! -f "${TEMPLATE_PATH}" ]; then + failed "Must run from runner root or install is corrupt" +fi + +#check if we run as root +if [[ $(id -u) != "0" ]]; then + echo "Failed: This script requires to run with sudo." >&2 + exit 1 +fi + +function install() +{ + echo "Creating launch runner in ${UNIT_PATH}" + if [ -f "${UNIT_PATH}" ]; then + failed "error: exists ${UNIT_PATH}" + fi + + if [ -f "${TEMP_PATH}" ]; then + rm "${TEMP_PATH}" || failed "failed to delete ${TEMP_PATH}" + fi + + # can optionally use username supplied + run_as_user=${arg_2:-$SUDO_USER} + echo "Run as user: ${run_as_user}" + + run_as_uid=$(id -u ${run_as_user}) || failed "User does not exist" + echo "Run as uid: ${run_as_uid}" + + run_as_gid=$(id -g ${run_as_user}) || failed "Group not available" + echo "gid: ${run_as_gid}" + + sed "s/{{User}}/${run_as_user}/g; s/{{Description}}/$(echo ${SVC_DESCRIPTION} | sed -e 's/[\/&]/\\&/g')/g; s/{{RunnerRoot}}/$(echo ${RUNNER_ROOT} | sed -e 's/[\/&]/\\&/g')/g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file" + mv "${TEMP_PATH}" "${UNIT_PATH}" || failed "failed to copy unit file" + + # unit file should not be executable and world writable + chmod 664 ${UNIT_PATH} || failed "failed to set permissions on ${UNIT_PATH}" + systemctl daemon-reload || failed "failed to reload daemons" + + # Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user. 
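In the systemd installer above, the unit description and the runner root are passed through `sed -e 's/[\/&]/\\&/g'` before being substituted into the template, because an unescaped `/` or `&` in the replacement text would otherwise terminate the `s///` expression or re-insert the matched token. A small demonstration of why that inner escape is needed (the path below is hypothetical):

```bash
#!/bin/bash
# Why the inner 's/[\/&]/\\&/g' escape above is needed (hypothetical path).
RUNNER_ROOT="/home/runner/actions-runner"

# Naive substitution: the slashes in the path collide with sed's s/// delimiters
# and sed reports an error such as "unknown option to `s'".
# echo "ExecStart={{RunnerRoot}}/runsvc.sh" | sed "s/{{RunnerRoot}}/${RUNNER_ROOT}/g"

# Escaping '/' and '&' in the replacement first makes the substitution safe.
escaped_root=$(echo "${RUNNER_ROOT}" | sed -e 's/[\/&]/\\&/g')
echo "ExecStart={{RunnerRoot}}/runsvc.sh" | sed "s/{{RunnerRoot}}/${escaped_root}/g"
# -> ExecStart=/home/runner/actions-runner/runsvc.sh
```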
+ cp ./bin/runsvc.sh ./runsvc.sh || failed "failed to copy runsvc.sh" + chown ${run_as_uid}:${run_as_gid} ./runsvc.sh || failed "failed to set owner for runsvc.sh" + chmod 755 ./runsvc.sh || failed "failed to set permission for runsvc.sh" + + systemctl enable ${SVC_NAME} || failed "failed to enable ${SVC_NAME}" + + echo "${SVC_NAME}" > ${CONFIG_PATH} || failed "failed to create .service file" + chown ${run_as_uid}:${run_as_gid} ${CONFIG_PATH} || failed "failed to set permission for ${CONFIG_PATH}" +} + +function start() +{ + systemctl start ${SVC_NAME} || failed "failed to start ${SVC_NAME}" + status +} + +function stop() +{ + systemctl stop ${SVC_NAME} || failed "failed to stop ${SVC_NAME}" + status +} + +function uninstall() +{ + stop + systemctl disable ${SVC_NAME} || failed "failed to disable ${SVC_NAME}" + rm "${UNIT_PATH}" || failed "failed to delete ${UNIT_PATH}" + if [ -f "${CONFIG_PATH}" ]; then + rm "${CONFIG_PATH}" || failed "failed to delete ${CONFIG_PATH}" + fi + systemctl daemon-reload || failed "failed to reload daemons" +} + +function status() +{ + if [ -f "${UNIT_PATH}" ]; then + echo + echo "${UNIT_PATH}" + else + echo + echo "not installed" + echo + return + fi + + systemctl --no-pager status ${SVC_NAME} +} + +function usage() +{ + echo + echo Usage: + echo "./svc.sh [install, start, stop, status, uninstall]" + echo "Commands:" + echo " install [user]: Install runner service as Root or specified user." + echo " start: Manually start the runner service." + echo " stop: Manually stop the runner service." + echo " status: Display status of runner service." + echo " uninstall: Uninstall runner service." + echo +} + +case $SVC_CMD in + "install") install;; + "status") status;; + "uninstall") uninstall;; + "start") start;; + "stop") stop;; + "status") status;; + *) usage;; +esac + +exit 0 diff --git a/src/Misc/layoutbin/update.cmd.template b/src/Misc/layoutbin/update.cmd.template new file mode 100644 index 00000000000..70d86fd60a7 --- /dev/null +++ b/src/Misc/layoutbin/update.cmd.template @@ -0,0 +1,143 @@ +@echo off + +rem runner will replace key words in the template and generate a batch script to run. +rem Keywords: +rem PROCESSID = pid +rem RUNNERPROCESSNAME = Runner.Listener[.exe] +rem ROOTFOLDER = ./ +rem EXISTRUNNERVERSION = 2.100.0 +rem DOWNLOADRUNNERVERSION = 2.101.0 +rem UPDATELOG = _diag/SelfUpdate-UTC.log +rem RESTARTINTERACTIVERUNNER = 0/1 + +setlocal +set runnerpid=_PROCESS_ID_ +set runnerprocessname=_RUNNER_PROCESS_NAME_ +set rootfolder=_ROOT_FOLDER_ +set existrunnerversion=_EXIST_RUNNER_VERSION_ +set downloadrunnerversion=_DOWNLOAD_RUNNER_VERSION_ +set logfile=_UPDATE_LOG_ +set restartinteractiverunner=_RESTART_INTERACTIVE_RUNNER_ + +rem log user who run the script +echo [%date% %time%] --------whoami-------- >> "%logfile%" 2>&1 +whoami >> "%logfile%" 2>&1 +echo [%date% %time%] --------whoami-------- >> "%logfile%" 2>&1 + +rem wait for runner process to exit. +echo [%date% %time%] Waiting for %runnerprocessname% (%runnerpid%) to complete >> "%logfile%" 2>&1 +:loop +tasklist /fi "pid eq %runnerpid%" | find /I "%runnerprocessname%" >> "%logfile%" 2>&1 +if ERRORLEVEL 1 ( + goto copy +) + +echo [%date% %time%] Process %runnerpid% still running, check again after 1 second. 
>> "%logfile%" 2>&1 +ping -n 2 127.0.0.1 >nul +goto loop + +rem start re-organize folders +:copy +echo [%date% %time%] Process %runnerpid% finished running >> "%logfile%" 2>&1 +echo [%date% %time%] Sleep 1 more second to make sure process exited >> "%logfile%" 2>&1 +ping -n 2 127.0.0.1 >nul +echo [%date% %time%] Re-organize folders >> "%logfile%" 2>&1 + +rem the folder structure under runner root will be +rem ./bin -> bin.2.100.0 (junction folder) +rem ./externals -> externals.2.100.0 (junction folder) +rem ./bin.2.100.0 +rem ./externals.2.100.0 +rem ./bin.2.99.0 +rem ./externals.2.99.0 +rem by using the juction folder we can avoid file in use problem. + +rem if the bin/externals junction point already exist, we just need to delete the juction point then re-create to point to new bin/externals folder. +rem if the bin/externals still are real folders, we need to rename the existing folder to bin.version format then create junction point to new bin/externals folder. + +rem check bin folder +rem we do findstr /C:" bin" since in migration mode, we create a junction folder from runner to bin. +rem as result, dir /AL | findstr "bin" will return the runner folder. output looks like (07/27/2016 05:21 PM runner [E:\bin]) +dir "%rootfolder%" /AL 2>&1 | findstr /C:" bin" >> "%logfile%" 2>&1 +if ERRORLEVEL 1 ( + rem return code 1 means it can't find a bin folder that is a junction folder + rem so we need to move the current bin folder to bin.2.99.0 folder. + echo [%date% %time%] move "%rootfolder%\bin" "%rootfolder%\bin.%existrunnerversion%" >> "%logfile%" 2>&1 + move "%rootfolder%\bin" "%rootfolder%\bin.%existrunnerversion%" >> "%logfile%" 2>&1 + if ERRORLEVEL 1 ( + echo [%date% %time%] Can't move "%rootfolder%\bin" to "%rootfolder%\bin.%existrunnerversion%" >> "%logfile%" 2>&1 + goto fail + ) + +) else ( + rem otherwise it find a bin folder that is a junction folder + rem we just need to delete the junction point. + echo [%date% %time%] Delete existing junction bin folder >> "%logfile%" 2>&1 + rmdir "%rootfolder%\bin" >> "%logfile%" 2>&1 + if ERRORLEVEL 1 ( + echo [%date% %time%] Can't delete existing junction bin folder >> "%logfile%" 2>&1 + goto fail + ) +) + +rem check externals folder +dir "%rootfolder%" /AL 2>&1 | findstr "externals" >> "%logfile%" 2>&1 +if ERRORLEVEL 1 ( + rem return code 1 means it can't find a externals folder that is a junction folder + rem so we need to move the current externals folder to externals.2.99.0 folder. + echo [%date% %time%] move "%rootfolder%\externals" "%rootfolder%\externals.%existrunnerversion%" >> "%logfile%" 2>&1 + move "%rootfolder%\externals" "%rootfolder%\externals.%existrunnerversion%" >> "%logfile%" 2>&1 + if ERRORLEVEL 1 ( + echo [%date% %time%] Can't move "%rootfolder%\externals" to "%rootfolder%\externals.%existrunnerversion%" >> "%logfile%" 2>&1 + goto fail + ) +) else ( + rem otherwise it find a externals folder that is a junction folder + rem we just need to delete the junction point. 
+ echo [%date% %time%] Delete existing junction externals folder >> "%logfile%" 2>&1 + rmdir "%rootfolder%\externals" >> "%logfile%" 2>&1 + if ERRORLEVEL 1 ( + echo [%date% %time%] Can't delete existing junction externals folder >> "%logfile%" 2>&1 + goto fail + ) +) + +rem create junction bin folder +echo [%date% %time%] Create junction bin folder >> "%logfile%" 2>&1 +mklink /J "%rootfolder%\bin" "%rootfolder%\bin.%downloadrunnerversion%" >> "%logfile%" 2>&1 +if ERRORLEVEL 1 ( + echo [%date% %time%] Can't create junction bin folder >> "%logfile%" 2>&1 + goto fail +) + +rem create junction externals folder +echo [%date% %time%] Create junction externals folder >> "%logfile%" 2>&1 +mklink /J "%rootfolder%\externals" "%rootfolder%\externals.%downloadrunnerversion%" >> "%logfile%" 2>&1 +if ERRORLEVEL 1 ( + echo [%date% %time%] Can't create junction externals folder >> "%logfile%" 2>&1 + goto fail +) + +echo [%date% %time%] Update succeed >> "%logfile%" 2>&1 + +rem rename the update log file with %logfile%.succeed/.failed/succeedneedrestart +rem runner service host can base on the log file name determin the result of the runner update +echo [%date% %time%] Rename "%logfile%" to be "%logfile%.succeed" >> "%logfile%" 2>&1 +move "%logfile%" "%logfile%.succeed" >nul + +rem restart interactive runner if needed +if %restartinteractiverunner% equ 1 ( + echo [%date% %time%] Restart interactive runner >> "%logfile%.succeed" 2>&1 + endlocal + start "Actions Runner" cmd.exe /k "_ROOT_FOLDER_\run.cmd" +) else ( + endlocal +) + +goto :eof + +:fail +echo [%date% %time%] Rename "%logfile%" to be "%logfile%.failed" >> "%logfile%" 2>&1 +move "%logfile%" "%logfile%.failed" >nul +goto :eof + diff --git a/src/Misc/layoutbin/update.sh.template b/src/Misc/layoutbin/update.sh.template new file mode 100644 index 00000000000..c09cc1d5b4c --- /dev/null +++ b/src/Misc/layoutbin/update.sh.template @@ -0,0 +1,133 @@ +#!/bin/bash + +# runner will replace key words in the template and generate a batch script to run. +# Keywords: +# PROCESSID = pid +# RUNNERPROCESSNAME = Runner.Listener[.exe] +# ROOTFOLDER = ./ +# EXISTRUNNERVERSION = 2.100.0 +# DOWNLOADRUNNERVERSION = 2.101.0 +# UPDATELOG = _diag/SelfUpdate-UTC.log +# RESTARTINTERACTIVERUNNER = 0/1 + +runnerpid=_PROCESS_ID_ +runnerprocessname=_RUNNER_PROCESS_NAME_ +rootfolder="_ROOT_FOLDER_" +existrunnerversion=_EXIST_RUNNER_VERSION_ +downloadrunnerversion=_DOWNLOAD_RUNNER_VERSION_ +logfile="_UPDATE_LOG_" +restartinteractiverunner=_RESTART_INTERACTIVE_RUNNER_ + +# log user who run the script +date "+[%F %T-%4N] --------whoami--------" >> "$logfile" 2>&1 +whoami >> "$logfile" 2>&1 +date "+[%F %T-%4N] --------whoami--------" >> "$logfile" 2>&1 + +# wait for runner process to exit. +date "+[%F %T-%4N] Waiting for $runnerprocessname ($runnerpid) to complete" >> "$logfile" 2>&1 +while [ -e /proc/$runnerpid ] +do + date "+[%F %T-%4N] Process $runnerpid still running" >> "$logfile" 2>&1 + ping -c 2 127.0.0.1 >nul +done +date "+[%F %T-%4N] Process $runnerpid finished running" >> "$logfile" 2>&1 + +# start re-organize folders +date "+[%F %T-%4N] Sleep 1 more second to make sure process exited" >> "$logfile" 2>&1 +ping -c 2 127.0.0.1 >nul + +# the folder structure under runner root will be +# ./bin -> bin.2.100.0 (junction folder) +# ./externals -> externals.2.100.0 (junction folder) +# ./bin.2.100.0 +# ./externals.2.100.0 +# ./bin.2.99.0 +# ./externals.2.99.0 +# by using the juction folder we can avoid file in use problem. 
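On the Linux and macOS side, the self-update script that begins above (update.sh.template) swaps `bin` and `externals` by keeping real, version-suffixed folders and pointing plain `bin`/`externals` symlinks at the active version, so files belonging to the running version are never overwritten while in use. A condensed sketch of one such swap, with hypothetical version numbers and root path:

```bash
#!/bin/bash
# Condensed sketch of the bin/ symlink swap done by update.sh.template (values are hypothetical).
rootfolder="$HOME/actions-runner"
existrunnerversion="2.100.0"
downloadrunnerversion="2.101.0"

if [[ -L "$rootfolder/bin" ]]; then
    # Already a symlink from a previous update: just drop the link.
    rm "$rootfolder/bin"
else
    # First update: park the real folder under a version-suffixed name.
    mv "$rootfolder/bin" "$rootfolder/bin.$existrunnerversion"
fi

# Point bin at the freshly downloaded version.
ln -s "$rootfolder/bin.$downloadrunnerversion" "$rootfolder/bin"
```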
+ +# if the bin/externals junction point already exist, we just need to delete the juction point then re-create to point to new bin/externals folder. +# if the bin/externals still are real folders, we need to rename the existing folder to bin.version format then create junction point to new bin/externals folder. + +# check bin folder +if [[ -L "$rootfolder/bin" && -d "$rootfolder/bin" ]] +then + # return code 0 means it find a bin folder that is a junction folder + # we just need to delete the junction point. + date "+[%F %T-%4N] Delete existing junction bin folder" >> "$logfile" + rm "$rootfolder/bin" >> "$logfile" + if [ $? -ne 0 ] + then + date "+[%F %T-%4N] Can't delete existing junction bin folder" >> "$logfile" + mv -fv "$logfile" "$logfile.failed" + exit 1 + fi +else + # otherwise, we need to move the current bin folder to bin.2.99.0 folder. + date "+[%F %T-%4N] move $rootfolder/bin $rootfolder/bin.$existrunnerversion" >> "$logfile" 2>&1 + mv -fv "$rootfolder/bin" "$rootfolder/bin.$existrunnerversion" >> "$logfile" 2>&1 + if [ $? -ne 0 ] + then + date "+[%F %T-%4N] Can't move $rootfolder/bin to $rootfolder/bin.$existrunnerversion" >> "$logfile" 2>&1 + mv -fv "$logfile" "$logfile.failed" + exit 1 + fi +fi + +# check externals folder +if [[ -L "$rootfolder/externals" && -d "$rootfolder/externals" ]] +then + # the externals folder is already a junction folder + # we just need to delete the junction point. + date "+[%F %T-%4N] Delete existing junction externals folder" >> "$logfile" + rm "$rootfolder/externals" >> "$logfile" + if [ $? -ne 0 ] + then + date "+[%F %T-%4N] Can't delete existing junction externals folder" >> "$logfile" + mv -fv "$logfile" "$logfile.failed" + exit 1 + fi +else + # otherwise, we need to move the current externals folder to externals.2.99.0 folder. + date "+[%F %T-%4N] move $rootfolder/externals $rootfolder/externals.$existrunnerversion" >> "$logfile" 2>&1 + mv -fv "$rootfolder/externals" "$rootfolder/externals.$existrunnerversion" >> "$logfile" 2>&1 + if [ $? -ne 0 ] + then + date "+[%F %T-%4N] Can't move $rootfolder/externals to $rootfolder/externals.$existrunnerversion" >> "$logfile" 2>&1 + mv -fv "$logfile" "$logfile.failed" + exit 1 + fi +fi + +# create junction bin folder +date "+[%F %T-%4N] Create junction bin folder" >> "$logfile" 2>&1 +ln -s "$rootfolder/bin.$downloadrunnerversion" "$rootfolder/bin" >> "$logfile" 2>&1 +if [ $? -ne 0 ] +then + date "+[%F %T-%4N] Can't create junction bin folder" >> "$logfile" 2>&1 + mv -fv "$logfile" "$logfile.failed" + exit 1 +fi + +# create junction externals folder +date "+[%F %T-%4N] Create junction externals folder" >> "$logfile" 2>&1 +ln -s "$rootfolder/externals.$downloadrunnerversion" "$rootfolder/externals" >> "$logfile" 2>&1 +if [ $? 
-ne 0 ] +then + date "+[%F %T-%4N] Can't create junction externals folder" >> "$logfile" 2>&1 + mv -fv "$logfile" "$logfile.failed" + exit 1 +fi + +date "+[%F %T-%4N] Update succeed" >> "$logfile" + +# rename the update log file with %logfile%.succeed/.failed/succeedneedrestart +# runner service host can base on the log file name determin the result of the runner update +date "+[%F %T-%4N] Rename $logfile to be $logfile.succeed" >> "$logfile" 2>&1 +mv -fv "$logfile" "$logfile.succeed" >> "$logfile" 2>&1 + +# restart interactive runner if needed +if [ $restartinteractiverunner -ne 0 ] +then + date "+[%F %T-%4N] Restarting interactive runner" >> "$logfile.succeed" 2>&1 + "$rootfolder/run.sh" & +fi diff --git a/src/Misc/layoutroot/config.cmd b/src/Misc/layoutroot/config.cmd new file mode 100644 index 00000000000..31c62ff3e6b --- /dev/null +++ b/src/Misc/layoutroot/config.cmd @@ -0,0 +1,26 @@ +@echo off + +rem ******************************************************************************** +rem Unblock specific files. +rem ******************************************************************************** +setlocal +if defined VERBOSE_ARG ( + set VERBOSE_ARG='Continue' +) else ( + set VERBOSE_ARG='SilentlyContinue' +) + +rem Unblock files in the root of the layout folder. E.g. .cmd files. +powershell.exe -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "$VerbosePreference = %VERBOSE_ARG% ; Get-ChildItem -LiteralPath '%~dp0' | ForEach-Object { Write-Verbose ('Unblock: {0}' -f $_.FullName) ; $_ } | Unblock-File | Out-Null" + +if /i "%~1" equ "remove" ( + rem ******************************************************************************** + rem Unconfigure the runner. + rem ******************************************************************************** + "%~dp0bin\Runner.Listener.exe" %* +) else ( + rem ******************************************************************************** + rem Configure the runner. + rem ******************************************************************************** + "%~dp0bin\Runner.Listener.exe" configure %* +) diff --git a/src/Misc/layoutroot/config.sh b/src/Misc/layoutroot/config.sh new file mode 100755 index 00000000000..20ec606f776 --- /dev/null +++ b/src/Misc/layoutroot/config.sh @@ -0,0 +1,86 @@ +#!/bin/bash + +user_id=`id -u` + +# we want to snapshot the environment of the config user +if [ $user_id -eq 0 -a -z "$AGENT_ALLOW_RUNASROOT" ]; then + echo "Must not run with sudo" + exit 1 +fi + +# Check dotnet core 2.1 dependencies for Linux +if [[ (`uname` == "Linux") ]] +then + command -v ldd > /dev/null + if [ $? -ne 0 ] + then + echo "Can not find 'ldd'. Please install 'ldd' and try again." + exit 1 + fi + + ldd ./bin/libcoreclr.so | grep 'not found' + if [ $? -eq 0 ]; then + echo "Dependencies is missing for Dotnet Core 2.1" + echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 2.1 dependencies." + exit 1 + fi + + ldd ./bin/System.Security.Cryptography.Native.OpenSsl.so | grep 'not found' + if [ $? -eq 0 ]; then + echo "Dependencies is missing for Dotnet Core 2.1" + echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 2.1 dependencies." + exit 1 + fi + + ldd ./bin/System.IO.Compression.Native.so | grep 'not found' + if [ $? -eq 0 ]; then + echo "Dependencies is missing for Dotnet Core 2.1" + echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 2.1 dependencies." + exit 1 + fi + + ldd ./bin/System.Net.Http.Native.so | grep 'not found' + if [ $? 
-eq 0 ]; then + echo "Dependencies is missing for Dotnet Core 2.1" + echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 2.1 dependencies." + exit 1 + fi + + if ! [ -x "$(command -v ldconfig)" ]; then + LDCONFIG_COMMAND="/sbin/ldconfig" + if ! [ -x "$LDCONFIG_COMMAND" ]; then + echo "Can not find 'ldconfig' in PATH and '/sbin/ldconfig' doesn't exists either. Please install 'ldconfig' and try again." + exit 1 + fi + else + LDCONFIG_COMMAND="ldconfig" + fi + + libpath=${LD_LIBRARY_PATH:-} + $LDCONFIG_COMMAND -NXv ${libpath//:/} 2>&1 | grep libicu >/dev/null 2>&1 + if [ $? -ne 0 ]; then + echo "Libicu's dependencies is missing for Dotnet Core 2.1" + echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 2.1 dependencies." + exit 1 + fi +fi + +# Change directory to the script root directory +# https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within +SOURCE="${BASH_SOURCE[0]}" +while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink + DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + SOURCE="$(readlink "$SOURCE")" + [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located +done +DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" +cd $DIR + +source ./env.sh + +shopt -s nocasematch +if [[ "$1" == "remove" ]]; then + ./bin/Runner.Listener "$@" +else + ./bin/Runner.Listener configure "$@" +fi diff --git a/src/Misc/layoutroot/env.sh b/src/Misc/layoutroot/env.sh new file mode 100755 index 00000000000..9aaf2bf5176 --- /dev/null +++ b/src/Misc/layoutroot/env.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +varCheckList=( + 'LANG' + 'JAVA_HOME' + 'ANT_HOME' + 'M2_HOME' + 'ANDROID_HOME' + 'GRADLE_HOME' + 'NVM_BIN' + 'NVM_PATH' + 'VSTS_HTTP_PROXY' + 'VSTS_HTTP_PROXY_USERNAME' + 'VSTS_HTTP_PROXY_PASSWORD' + 'LD_LIBRARY_PATH' + 'PERL5LIB' + ) + +envContents="" + +if [ -f ".env" ]; then + envContents=`cat .env` +else + touch .env +fi + +function writeVar() +{ + checkVar="$1" + checkDelim="${1}=" + if test "${envContents#*$checkDelim}" = "$envContents" + then + if [ ! -z "${!checkVar}" ]; then + echo "${checkVar}=${!checkVar}">>.env + fi + fi +} + +echo $PATH>.path + +for var_name in ${varCheckList[@]} +do + writeVar "${var_name}" +done diff --git a/src/Misc/layoutroot/run.cmd b/src/Misc/layoutroot/run.cmd new file mode 100644 index 00000000000..df5fd390975 --- /dev/null +++ b/src/Misc/layoutroot/run.cmd @@ -0,0 +1,33 @@ +@echo off + +rem ******************************************************************************** +rem Unblock specific files. +rem ******************************************************************************** +setlocal +if defined VERBOSE_ARG ( + set VERBOSE_ARG='Continue' +) else ( + set VERBOSE_ARG='SilentlyContinue' +) + +rem Unblock files in the root of the layout folder. E.g. .cmd files. +powershell.exe -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "$VerbosePreference = %VERBOSE_ARG% ; Get-ChildItem -LiteralPath '%~dp0' | ForEach-Object { Write-Verbose ('Unblock: {0}' -f $_.FullName) ; $_ } | Unblock-File | Out-Null" + +if /i "%~1" equ "localRun" ( + rem ******************************************************************************** + rem Local run. 
+ rem ******************************************************************************** + "%~dp0bin\Runner.Listener.exe" %* +) else ( + rem ******************************************************************************** + rem Run. + rem ******************************************************************************** + "%~dp0bin\Runner.Listener.exe" run %* + + rem Return code 4 means the run once runner received an update message. + rem Sleep 5 seconds to wait for the update process finish and run the runner again. + if ERRORLEVEL 4 ( + timeout /t 5 /nobreak > NUL + "%~dp0bin\Runner.Listener.exe" run %* + ) +) diff --git a/src/Misc/layoutroot/run.sh b/src/Misc/layoutroot/run.sh new file mode 100755 index 00000000000..c874d8274b4 --- /dev/null +++ b/src/Misc/layoutroot/run.sh @@ -0,0 +1,51 @@ +#!/bin/bash + +# Validate not sudo +user_id=`id -u` +if [ $user_id -eq 0 -a -z "$AGENT_ALLOW_RUNASROOT" ]; then + echo "Must not run interactively with sudo" + exit 1 +fi + +# Change directory to the script root directory +# https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within +SOURCE="${BASH_SOURCE[0]}" +while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink + DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + SOURCE="$(readlink "$SOURCE")" + [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located +done +DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )" + +# Do not "cd $DIR". For localRun, the current directory is expected to be the repo location on disk. + +# Run +shopt -s nocasematch +if [[ "$1" == "localRun" ]]; then + "$DIR"/bin/Runner.Listener $* +else + "$DIR"/bin/Runner.Listener run $* + +# Return code 4 means the run once agent received an update message. +# Sleep 5 seconds to wait for the update process finish and run the agent again. + returnCode=$? + if [[ $returnCode == 4 ]]; then + if [ ! -x "$(command -v sleep)" ]; then + if [ ! 
-x "$(command -v ping)" ]; then + COUNT="0" + while [[ $COUNT != 5000 ]]; do + echo "SLEEP" >nul + COUNT=$[$COUNT+1] + done + else + ping -n 5 127.0.0.1 >nul + fi + else + sleep 5 >nul + fi + + "$DIR"/bin/Runner.Listener run $* + else + exit $returnCode + fi +fi diff --git a/src/NuGet.Config b/src/NuGet.Config new file mode 100644 index 00000000000..5816f1bb346 --- /dev/null +++ b/src/NuGet.Config @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/src/Runner.Common/ActionCommand.cs b/src/Runner.Common/ActionCommand.cs new file mode 100644 index 00000000000..eff88742934 --- /dev/null +++ b/src/Runner.Common/ActionCommand.cs @@ -0,0 +1,253 @@ +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System; +using System.Collections.Generic; + +namespace GitHub.Runner.Common +{ + public sealed class ActionCommand + { + private static readonly EscapeMapping[] _escapeMappings = new[] + { + new EscapeMapping(token: "%", replacement: "%25"), + new EscapeMapping(token: ";", replacement: "%3B"), + new EscapeMapping(token: "\r", replacement: "%0D"), + new EscapeMapping(token: "\n", replacement: "%0A"), + new EscapeMapping(token: "]", replacement: "%5D"), + }; + + private static readonly EscapeMapping[] _escapeDataMappings = new[] + { + new EscapeMapping(token: "\r", replacement: "%0D"), + new EscapeMapping(token: "\n", replacement: "%0A"), + }; + + private static readonly EscapeMapping[] _escapePropertyMappings = new[] + { + new EscapeMapping(token: "%", replacement: "%25"), + new EscapeMapping(token: "\r", replacement: "%0D"), + new EscapeMapping(token: "\n", replacement: "%0A"), + new EscapeMapping(token: ":", replacement: "%3A"), + new EscapeMapping(token: ",", replacement: "%2C"), + }; + + private readonly Dictionary _properties = new Dictionary(StringComparer.OrdinalIgnoreCase); + public const string Prefix = "##["; + public const string _commandKey = "::"; + + public ActionCommand(string command) + { + ArgUtil.NotNullOrEmpty(command, nameof(command)); + Command = command; + } + + public string Command { get; } + + + public Dictionary Properties => _properties; + + public string Data { get; set; } + + public static bool TryParseV2(string message, HashSet registeredCommands, out ActionCommand command) + { + command = null; + if (string.IsNullOrEmpty(message)) + { + return false; + } + + try + { + // the message needs to start with the keyword after trim leading space. + message = message.TrimStart(); + if (!message.StartsWith(_commandKey)) + { + return false; + } + + // Get the index of the separator between the command info and the data. + int endIndex = message.IndexOf(_commandKey, _commandKey.Length); + if (endIndex < 0) + { + return false; + } + + // Get the command info (command and properties). + int cmdIndex = _commandKey.Length; + string cmdInfo = message.Substring(cmdIndex, endIndex - cmdIndex); + + // Get the command name + int spaceIndex = cmdInfo.IndexOf(' '); + string commandName = + spaceIndex < 0 + ? cmdInfo + : cmdInfo.Substring(0, spaceIndex); + + if (registeredCommands.Contains(commandName)) + { + // Initialize the command. + command = new ActionCommand(commandName); + } + else + { + return false; + } + + // Set the properties. 
+ if (spaceIndex > 0) + { + string propertiesStr = cmdInfo.Substring(spaceIndex + 1).Trim(); + string[] splitProperties = propertiesStr.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries); + foreach (string propertyStr in splitProperties) + { + string[] pair = propertyStr.Split(new[] { '=' }, count: 2, options: StringSplitOptions.RemoveEmptyEntries); + if (pair.Length == 2) + { + command.Properties[pair[0]] = UnescapeProperty(pair[1]); + } + } + } + + command.Data = UnescapeData(message.Substring(endIndex + _commandKey.Length)); + return true; + } + catch + { + command = null; + return false; + } + } + + public static bool TryParse(string message, HashSet registeredCommands, out ActionCommand command) + { + command = null; + if (string.IsNullOrEmpty(message)) + { + return false; + } + + try + { + // Get the index of the prefix. + int prefixIndex = message.IndexOf(Prefix); + if (prefixIndex < 0) + { + return false; + } + + // Get the index of the separator between the command info and the data. + int rbIndex = message.IndexOf(']', prefixIndex); + if (rbIndex < 0) + { + return false; + } + + // Get the command info (command and properties). + int cmdIndex = prefixIndex + Prefix.Length; + string cmdInfo = message.Substring(cmdIndex, rbIndex - cmdIndex); + + // Get the command name + int spaceIndex = cmdInfo.IndexOf(' '); + string commandName = + spaceIndex < 0 + ? cmdInfo + : cmdInfo.Substring(0, spaceIndex); + + if (registeredCommands.Contains(commandName)) + { + // Initialize the command. + command = new ActionCommand(commandName); + } + else + { + return false; + } + + // Set the properties. + if (spaceIndex > 0) + { + string propertiesStr = cmdInfo.Substring(spaceIndex + 1); + string[] splitProperties = propertiesStr.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries); + foreach (string propertyStr in splitProperties) + { + string[] pair = propertyStr.Split(new[] { '=' }, count: 2, options: StringSplitOptions.RemoveEmptyEntries); + if (pair.Length == 2) + { + command.Properties[pair[0]] = Unescape(pair[1]); + } + } + } + + command.Data = Unescape(message.Substring(rbIndex + 1)); + return true; + } + catch + { + command = null; + return false; + } + } + + private static string Unescape(string escaped) + { + if (string.IsNullOrEmpty(escaped)) + { + return string.Empty; + } + + string unescaped = escaped; + foreach (EscapeMapping mapping in _escapeMappings) + { + unescaped = unescaped.Replace(mapping.Replacement, mapping.Token); + } + + return unescaped; + } + + private static string UnescapeProperty(string escaped) + { + if (string.IsNullOrEmpty(escaped)) + { + return string.Empty; + } + + string unescaped = escaped; + foreach (EscapeMapping mapping in _escapePropertyMappings) + { + unescaped = unescaped.Replace(mapping.Replacement, mapping.Token); + } + + return unescaped; + } + + private static string UnescapeData(string escaped) + { + if (string.IsNullOrEmpty(escaped)) + { + return string.Empty; + } + + string unescaped = escaped; + foreach (EscapeMapping mapping in _escapeDataMappings) + { + unescaped = unescaped.Replace(mapping.Replacement, mapping.Token); + } + + return unescaped; + } + + private sealed class EscapeMapping + { + public string Replacement { get; } + public string Token { get; } + + public EscapeMapping(string token, string replacement) + { + ArgUtil.NotNullOrEmpty(token, nameof(token)); + ArgUtil.NotNullOrEmpty(replacement, nameof(replacement)); + Token = token; + Replacement = replacement; + } + } + } +} diff --git a/src/Runner.Common/ActionResult.cs 
b/src/Runner.Common/ActionResult.cs new file mode 100644 index 00000000000..db68b685249 --- /dev/null +++ b/src/Runner.Common/ActionResult.cs @@ -0,0 +1,15 @@ +using System; + +namespace GitHub.Runner.Common +{ + public enum ActionResult + { + Success = 0, + + Failure = 1, + + Cancelled = 2, + + Skipped = 3 + } +} \ No newline at end of file diff --git a/src/Runner.Common/AsyncManualResetEvent.cs b/src/Runner.Common/AsyncManualResetEvent.cs new file mode 100644 index 00000000000..42f5b784442 --- /dev/null +++ b/src/Runner.Common/AsyncManualResetEvent.cs @@ -0,0 +1,33 @@ +using System.Threading; +using System.Threading.Tasks; + +namespace GitHub.Runner.Common +{ + //Stephen Toub: http://blogs.msdn.com/b/pfxteam/archive/2012/02/11/10266920.aspx + + public class AsyncManualResetEvent + { + private volatile TaskCompletionSource m_tcs = new TaskCompletionSource(); + + public Task WaitAsync() { return m_tcs.Task; } + + public void Set() + { + var tcs = m_tcs; + Task.Factory.StartNew(s => ((TaskCompletionSource)s).TrySetResult(true), + tcs, CancellationToken.None, TaskCreationOptions.PreferFairness, TaskScheduler.Default); + tcs.Task.Wait(); + } + + public void Reset() + { + while (true) + { + var tcs = m_tcs; + if (!tcs.Task.IsCompleted || + Interlocked.CompareExchange(ref m_tcs, new TaskCompletionSource(), tcs) == tcs) + return; + } + } + } +} diff --git a/src/Runner.Common/Capabilities/CapabilitiesManager.cs b/src/Runner.Common/Capabilities/CapabilitiesManager.cs new file mode 100644 index 00000000000..76bc5fca99f --- /dev/null +++ b/src/Runner.Common/Capabilities/CapabilitiesManager.cs @@ -0,0 +1,73 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common.Capabilities +{ + [ServiceLocator(Default = typeof(CapabilitiesManager))] + public interface ICapabilitiesManager : IRunnerService + { + Task> GetCapabilitiesAsync(RunnerSettings settings, CancellationToken token); + } + + public sealed class CapabilitiesManager : RunnerService, ICapabilitiesManager + { + public async Task> GetCapabilitiesAsync(RunnerSettings settings, CancellationToken cancellationToken) + { + Trace.Entering(); + ArgUtil.NotNull(settings, nameof(settings)); + + // Initialize a dictionary of capabilities. + var capabilities = new Dictionary(StringComparer.OrdinalIgnoreCase); + + if (settings.SkipCapabilitiesScan) + { + Trace.Info("Skip capabilities scan."); + return capabilities; + } + + // Get the providers. + var extensionManager = HostContext.GetService(); + IEnumerable providers = + extensionManager + .GetExtensions() + ?.OrderBy(x => x.Order); + + // Add each capability returned from each provider. + foreach (ICapabilitiesProvider provider in providers ?? new ICapabilitiesProvider[0]) + { + foreach (Capability capability in await provider.GetCapabilitiesAsync(settings, cancellationToken) ?? new List()) + { + // Make sure we mask secrets in capabilities values. 
+ capabilities[capability.Name] = HostContext.SecretMasker.MaskSecrets(capability.Value); + } + } + + return capabilities; + } + } + + public interface ICapabilitiesProvider : IExtension + { + int Order { get; } + + Task> GetCapabilitiesAsync(RunnerSettings settings, CancellationToken cancellationToken); + } + + public sealed class Capability + { + public string Name { get; } + public string Value { get; } + + public Capability(string name, string value) + { + ArgUtil.NotNullOrEmpty(name, nameof(name)); + Name = name; + Value = value ?? string.Empty; + } + } +} diff --git a/src/Runner.Common/Capabilities/RunnerCapabilitiesProvider.cs b/src/Runner.Common/Capabilities/RunnerCapabilitiesProvider.cs new file mode 100644 index 00000000000..6821e4de089 --- /dev/null +++ b/src/Runner.Common/Capabilities/RunnerCapabilitiesProvider.cs @@ -0,0 +1,86 @@ +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using Microsoft.Win32; +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace GitHub.Runner.Common.Capabilities +{ + public sealed class RunnerCapabilitiesProvider : RunnerService, ICapabilitiesProvider + { + public Type ExtensionType => typeof(ICapabilitiesProvider); + + public int Order => 99; // Process last to override prior. + + public Task> GetCapabilitiesAsync(RunnerSettings settings, CancellationToken cancellationToken) + { + ArgUtil.NotNull(settings, nameof(settings)); + var capabilities = new List(); + Add(capabilities, "Runner.Name", settings.AgentName ?? string.Empty); + Add(capabilities, "Runner.OS", VarUtil.OS); + Add(capabilities, "Runner.OSArchitecture", VarUtil.OSArchitecture); +#if OS_WINDOWS + Add(capabilities, "Runner.OSVersion", GetOSVersionString()); +#endif + Add(capabilities, "InteractiveSession", (HostContext.StartupType != StartupType.Service).ToString()); + Add(capabilities, "Runner.Version", BuildConstants.RunnerPackage.Version); + Add(capabilities, "Runner.ComputerName", Environment.MachineName ?? string.Empty); + Add(capabilities, "Runner.HomeDirectory", HostContext.GetDirectory(WellKnownDirectory.Root)); + return Task.FromResult(capabilities); + } + + private void Add(List capabilities, string name, string value) + { + Trace.Info($"Adding '{name}': '{value}'"); + capabilities.Add(new Capability(name, value)); + } + + private object GetHklmValue(string keyName, string valueName) + { + keyName = $@"HKEY_LOCAL_MACHINE\{keyName}"; + object value = Registry.GetValue(keyName, valueName, defaultValue: null); + if (object.ReferenceEquals(value, null)) + { + Trace.Info($"Key name '{keyName}', value name '{valueName}' is null."); + return null; + } + + Trace.Info($"Key name '{keyName}', value name '{valueName}': '{value}'"); + return value; + } + + private string GetOSVersionString() + { + // Do not use System.Environment.OSVersion.Version to resolve the OS version number. + // It leverages the GetVersionEx function which may report an incorrect version + // depending on the app's manifest. For details, see: + // https://msdn.microsoft.com/library/windows/desktop/ms724451(v=vs.85).aspx + + // Attempt to retrieve the major/minor version from the new registry values added in + // in Windows 10. + // + // The registry value "CurrentVersion" is unreliable in Windows 10. It contains the + // value "6.3" instead of "10.0". 
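The comment above explains why the Windows version is read from the registry rather than from Environment.OSVersion. A minimal, Windows-only sketch of that lookup using Microsoft.Win32.Registry directly (the runner wraps this in GetHklmValue and StringUtil.Format) might look like:

    using System;
    using Microsoft.Win32;

    static class OsVersionProbe
    {
        static void Main()
        {
            const string key = @"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion";

            // Windows 10+ exposes the real major/minor numbers as DWORD values; older
            // releases only have the "CurrentVersion" string, which is unreliable on 10.
            object major = Registry.GetValue(key, "CurrentMajorVersionNumber", null);
            object minor = Registry.GetValue(key, "CurrentMinorVersionNumber", null);

            string majorMinor = (major != null && minor != null)
                ? $"{major}.{minor}"
                : Registry.GetValue(key, "CurrentVersion", null) as string;

            string build = Registry.GetValue(key, "CurrentBuildNumber", null) as string;
            Console.WriteLine($"{majorMinor}.{build}");  // e.g. 10.0.17763
        }
    }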
+ object major = GetHklmValue(@"SOFTWARE\Microsoft\Windows NT\CurrentVersion", "CurrentMajorVersionNumber"); + object minor = GetHklmValue(@"SOFTWARE\Microsoft\Windows NT\CurrentVersion", "CurrentMinorVersionNumber"); + string majorMinorString; + if (major != null && minor != null) + { + majorMinorString = StringUtil.Format("{0}.{1}", major, minor); + } + else + { + // Fallback to the registry value "CurrentVersion". + majorMinorString = GetHklmValue(@"SOFTWARE\Microsoft\Windows NT\CurrentVersion", "CurrentVersion") as string; + } + + // Opted to use the registry value "CurrentBuildNumber" over "CurrentBuild". Based on brief + // internet investigation, the only difference appears to be that on Windows XP "CurrentBuild" + // was unreliable and "CurrentBuildNumber" was the correct choice. + string build = GetHklmValue(@"SOFTWARE\Microsoft\Windows NT\CurrentVersion", "CurrentBuildNumber") as string; + return StringUtil.Format("{0}.{1}", majorMinorString, build); + } + } +} diff --git a/src/Runner.Common/CommandLineParser.cs b/src/Runner.Common/CommandLineParser.cs new file mode 100644 index 00000000000..bca3b461ab8 --- /dev/null +++ b/src/Runner.Common/CommandLineParser.cs @@ -0,0 +1,128 @@ +using GitHub.Runner.Common.Util; +using System; +using System.Collections.Generic; +using GitHub.DistributedTask.Logging; +using GitHub.Runner.Sdk; + +// +// Pattern: +// cmd1 cmd2 --arg1 arg1val --aflag --arg2 arg2val +// + +namespace GitHub.Runner.Common +{ + public sealed class CommandLineParser + { + private ISecretMasker _secretMasker; + private Tracing _trace; + + public List Commands { get; } + public HashSet Flags { get; } + public Dictionary Args { get; } + public HashSet SecretArgNames { get; } + private bool HasArgs { get; set; } + + public CommandLineParser(IHostContext hostContext, string[] secretArgNames) + { + _secretMasker = hostContext.SecretMasker; + _trace = hostContext.GetTrace(nameof(CommandLineParser)); + + Commands = new List(); + Flags = new HashSet(StringComparer.OrdinalIgnoreCase); + Args = new Dictionary(StringComparer.OrdinalIgnoreCase); + SecretArgNames = new HashSet(secretArgNames ?? 
new string[0], StringComparer.OrdinalIgnoreCase); + } + + public bool IsCommand(string name) + { + bool result = false; + if (Commands.Count > 0) + { + result = String.Equals(name, Commands[0], StringComparison.CurrentCultureIgnoreCase); + } + + return result; + } + + public void Parse(string[] args) + { + _trace.Info(nameof(Parse)); + ArgUtil.NotNull(args, nameof(args)); + _trace.Info("Parsing {0} args", args.Length); + + string argScope = null; + foreach (string arg in args) + { + _trace.Info("parsing argument"); + + HasArgs = HasArgs || arg.StartsWith("--"); + _trace.Info("HasArgs: {0}", HasArgs); + + if (string.Equals(arg, "/?", StringComparison.Ordinal)) + { + Flags.Add("help"); + } + else if (!HasArgs) + { + _trace.Info("Adding Command: {0}", arg); + Commands.Add(arg.Trim()); + } + else + { + // it's either an arg, an arg value or a flag + if (arg.StartsWith("--") && arg.Length > 2) + { + string argVal = arg.Substring(2); + _trace.Info("arg: {0}", argVal); + + // this means two --args in a row which means previous was a flag + if (argScope != null) + { + _trace.Info("Adding flag: {0}", argScope); + Flags.Add(argScope.Trim()); + } + + argScope = argVal; + } + else if (!arg.StartsWith("-")) + { + // we found a value - check if we're in scope of an arg + if (argScope != null && !Args.ContainsKey(argScope = argScope.Trim())) + { + if (SecretArgNames.Contains(argScope)) + { + _secretMasker.AddValue(arg); + } + + _trace.Info("Adding option '{0}': '{1}'", argScope, arg); + // ignore duplicates - first wins - below will be val1 + // --arg1 val1 --arg1 val1 + Args.Add(argScope, arg); + argScope = null; + } + } + else + { + // + // ignoring the second value for an arg (val2 below) + // --arg val1 val2 + + // ignoring invalid things like empty - and -- + // --arg val1 -- --flag + _trace.Info("Ignoring arg"); + } + } + } + + _trace.Verbose("done parsing arguments"); + + // handle last arg being a flag + if (argScope != null) + { + Flags.Add(argScope); + } + + _trace.Verbose("Exiting parse"); + } + } +} diff --git a/src/Runner.Common/ConfigurationStore.cs b/src/Runner.Common/ConfigurationStore.cs new file mode 100644 index 00000000000..d63d0fdedb2 --- /dev/null +++ b/src/Runner.Common/ConfigurationStore.cs @@ -0,0 +1,252 @@ +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System.IO; +using System.Runtime.Serialization; +using System.Text; +using System.Threading; + +namespace GitHub.Runner.Common +{ + // + // Settings are persisted in this structure + // + [DataContract] + public sealed class RunnerSettings + { + [DataMember(EmitDefaultValue = false)] + public bool AcceptTeeEula { get; set; } + + [DataMember(EmitDefaultValue = false)] + public int AgentId { get; set; } + + [DataMember(EmitDefaultValue = false)] + public string AgentName { get; set; } + + [DataMember(EmitDefaultValue = false)] + public string NotificationPipeName { get; set; } + + [DataMember(EmitDefaultValue = false)] + public string NotificationSocketAddress { get; set; } + + [DataMember(EmitDefaultValue = false)] + public bool SkipCapabilitiesScan { get; set; } + + [DataMember(EmitDefaultValue = false)] + public bool SkipSessionRecover { get; set; } + + [DataMember(EmitDefaultValue = false)] + public int PoolId { get; set; } + + [DataMember(EmitDefaultValue = false)] + public string PoolName { get; set; } + + [DataMember(EmitDefaultValue = false)] + public string ServerUrl { get; set; } + + [DataMember(EmitDefaultValue = false)] + public string GitHubUrl { get; set; } + + [DataMember(EmitDefaultValue = false)] 
+ public string WorkFolder { get; set; } + + [DataMember(EmitDefaultValue = false)] + public string MonitorSocketAddress { get; set; } + } + + [DataContract] + public sealed class RunnerRuntimeOptions + { +#if OS_WINDOWS + [DataMember(EmitDefaultValue = false)] + public bool GitUseSecureChannel { get; set; } +#endif + } + + [ServiceLocator(Default = typeof(ConfigurationStore))] + public interface IConfigurationStore : IRunnerService + { + bool IsConfigured(); + bool IsServiceConfigured(); + bool HasCredentials(); + CredentialData GetCredentials(); + RunnerSettings GetSettings(); + void SaveCredential(CredentialData credential); + void SaveSettings(RunnerSettings settings); + void DeleteCredential(); + void DeleteSettings(); + RunnerRuntimeOptions GetRunnerRuntimeOptions(); + void SaveRunnerRuntimeOptions(RunnerRuntimeOptions options); + void DeleteRunnerRuntimeOptions(); + } + + public sealed class ConfigurationStore : RunnerService, IConfigurationStore + { + private string _binPath; + private string _configFilePath; + private string _credFilePath; + private string _serviceConfigFilePath; + private string _runtimeOptionsFilePath; + + private CredentialData _creds; + private RunnerSettings _settings; + private RunnerRuntimeOptions _runtimeOptions; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + + var currentAssemblyLocation = System.Reflection.Assembly.GetEntryAssembly().Location; + Trace.Info("currentAssemblyLocation: {0}", currentAssemblyLocation); + + _binPath = HostContext.GetDirectory(WellKnownDirectory.Bin); + Trace.Info("binPath: {0}", _binPath); + + RootFolder = HostContext.GetDirectory(WellKnownDirectory.Root); + Trace.Info("RootFolder: {0}", RootFolder); + + _configFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Runner); + Trace.Info("ConfigFilePath: {0}", _configFilePath); + + _credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials); + Trace.Info("CredFilePath: {0}", _credFilePath); + + _serviceConfigFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Service); + Trace.Info("ServiceConfigFilePath: {0}", _serviceConfigFilePath); + + _runtimeOptionsFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Options); + Trace.Info("RuntimeOptionsFilePath: {0}", _runtimeOptionsFilePath); + } + + public string RootFolder { get; private set; } + + public bool HasCredentials() + { + ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); + Trace.Info("HasCredentials()"); + bool credsStored = (new FileInfo(_credFilePath)).Exists; + Trace.Info("stored {0}", credsStored); + return credsStored; + } + + public bool IsConfigured() + { + Trace.Info("IsConfigured()"); + bool configured = HostContext.RunMode == RunMode.Local || (new FileInfo(_configFilePath)).Exists; + Trace.Info("IsConfigured: {0}", configured); + return configured; + } + + public bool IsServiceConfigured() + { + ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); + Trace.Info("IsServiceConfigured()"); + bool serviceConfigured = (new FileInfo(_serviceConfigFilePath)).Exists; + Trace.Info($"IsServiceConfigured: {serviceConfigured}"); + return serviceConfigured; + } + + public CredentialData GetCredentials() + { + ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); + if (_creds == null) + { + _creds = IOUtil.LoadObject(_credFilePath); + } + + return _creds; + } + + public RunnerSettings GetSettings() + { + if (_settings == null) + { + RunnerSettings configuredSettings 
= null; + if (File.Exists(_configFilePath)) + { + string json = File.ReadAllText(_configFilePath, Encoding.UTF8); + Trace.Info($"Read setting file: {json.Length} chars"); + configuredSettings = StringUtil.ConvertFromJson(json); + } + + ArgUtil.NotNull(configuredSettings, nameof(configuredSettings)); + _settings = configuredSettings; + } + + return _settings; + } + + public void SaveCredential(CredentialData credential) + { + ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); + Trace.Info("Saving {0} credential @ {1}", credential.Scheme, _credFilePath); + if (File.Exists(_credFilePath)) + { + // Delete existing credential file first, since the file is hidden and not able to overwrite. + Trace.Info("Delete exist runner credential file."); + IOUtil.DeleteFile(_credFilePath); + } + + IOUtil.SaveObject(credential, _credFilePath); + Trace.Info("Credentials Saved."); + File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden); + } + + public void SaveSettings(RunnerSettings settings) + { + ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); + Trace.Info("Saving runner settings."); + if (File.Exists(_configFilePath)) + { + // Delete existing runner settings file first, since the file is hidden and not able to overwrite. + Trace.Info("Delete exist runner settings file."); + IOUtil.DeleteFile(_configFilePath); + } + + IOUtil.SaveObject(settings, _configFilePath); + Trace.Info("Settings Saved."); + File.SetAttributes(_configFilePath, File.GetAttributes(_configFilePath) | FileAttributes.Hidden); + } + + public void DeleteCredential() + { + ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); + IOUtil.Delete(_credFilePath, default(CancellationToken)); + } + + public void DeleteSettings() + { + ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); + IOUtil.Delete(_configFilePath, default(CancellationToken)); + } + + public RunnerRuntimeOptions GetRunnerRuntimeOptions() + { + if (_runtimeOptions == null && File.Exists(_runtimeOptionsFilePath)) + { + _runtimeOptions = IOUtil.LoadObject(_runtimeOptionsFilePath); + } + + return _runtimeOptions; + } + + public void SaveRunnerRuntimeOptions(RunnerRuntimeOptions options) + { + Trace.Info("Saving runtime options."); + if (File.Exists(_runtimeOptionsFilePath)) + { + // Delete existing runtime options file first, since the file is hidden and not able to overwrite. + Trace.Info("Delete exist runtime options file."); + IOUtil.DeleteFile(_runtimeOptionsFilePath); + } + + IOUtil.SaveObject(options, _runtimeOptionsFilePath); + Trace.Info("Options Saved."); + File.SetAttributes(_runtimeOptionsFilePath, File.GetAttributes(_runtimeOptionsFilePath) | FileAttributes.Hidden); + } + + public void DeleteRunnerRuntimeOptions() + { + IOUtil.Delete(_runtimeOptionsFilePath, default(CancellationToken)); + } + } +} diff --git a/src/Runner.Common/Constants.cs b/src/Runner.Common/Constants.cs new file mode 100644 index 00000000000..9825b07053f --- /dev/null +++ b/src/Runner.Common/Constants.cs @@ -0,0 +1,343 @@ +using System; + +namespace GitHub.Runner.Common +{ + public enum RunMode + { + Normal, // Keep "Normal" first (default value). 
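ConfigurationStore above persists settings and credentials as hidden JSON files and deletes an existing file before rewriting it, since a hidden file cannot be overwritten in place. A rough sketch of that save/load pattern, using System.Text.Json in place of the runner's IOUtil and StringUtil helpers (HiddenJsonStore and the sample file name are illustrative):

    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Text.Json;

    static class HiddenJsonStore
    {
        // Drop the Hidden attribute and delete the old file, write the JSON,
        // then hide the new file again.
        public static void Save<T>(T value, string path)
        {
            if (File.Exists(path))
            {
                File.SetAttributes(path, FileAttributes.Normal);
                File.Delete(path);
            }

            File.WriteAllText(path, JsonSerializer.Serialize(value, new JsonSerializerOptions { WriteIndented = true }));
            File.SetAttributes(path, File.GetAttributes(path) | FileAttributes.Hidden);
        }

        public static T Load<T>(string path)
            => JsonSerializer.Deserialize<T>(File.ReadAllText(path));

        static void Main()
        {
            Save(new Dictionary<string, string> { ["agentName"] = "my-runner" }, ".runner.sample");
            Console.WriteLine(Load<Dictionary<string, string>>(".runner.sample")["agentName"]);
        }
    }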
+ Local, + } + + public enum WellKnownDirectory + { + Bin, + Diag, + Externals, + Root, + Actions, + Temp, + Tools, + Update, + Work, + } + + public enum WellKnownConfigFile + { + Runner, + Credentials, + RSACredentials, + Service, + CredentialStore, + Certificates, + Proxy, + ProxyCredentials, + ProxyBypass, + Options, + } + + public static class Constants + { + /// Path environment variable name. +#if OS_WINDOWS + public static readonly string PathVariable = "Path"; +#else + public static readonly string PathVariable = "PATH"; +#endif + + public static string ProcessTrackingId = "RUNNER_TRACKING_ID"; + public static string PluginTracePrefix = "##[plugin.trace]"; + public static readonly int RunnerDownloadRetryMaxAttempts = 3; + + // This enum is embedded within the Constants class to make it easier to reference and avoid + // ambiguous type reference with System.Runtime.InteropServices.OSPlatform and System.Runtime.InteropServices.Architecture + public enum OSPlatform + { + OSX, + Linux, + Windows + } + + public enum Architecture + { + X86, + X64, + Arm, + Arm64 + } + + public static class Runner + { +#if OS_LINUX + public static readonly OSPlatform Platform = OSPlatform.Linux; +#elif OS_OSX + public static readonly OSPlatform Platform = OSPlatform.OSX; +#elif OS_WINDOWS + public static readonly OSPlatform Platform = OSPlatform.Windows; +#endif + +#if X86 + public static readonly Architecture PlatformArchitecture = Architecture.X86; +#elif X64 + public static readonly Architecture PlatformArchitecture = Architecture.X64; +#elif ARM + public static readonly Architecture PlatformArchitecture = Architecture.Arm; +#elif ARM64 + public static readonly Architecture PlatformArchitecture = Architecture.Arm64; +#endif + + public static readonly TimeSpan ExitOnUnloadTimeout = TimeSpan.FromSeconds(30); + + public static class CommandLine + { + //if you are adding a new arg, please make sure you update the + //validArgs array as well present in the CommandSettings.cs + public static class Args + { + public static readonly string Agent = "agent"; + public static readonly string Auth = "auth"; + public static readonly string CollectionName = "collectionname"; + public static readonly string DeploymentGroupName = "deploymentgroupname"; + public static readonly string DeploymentPoolName = "deploymentpoolname"; + public static readonly string DeploymentGroupTags = "deploymentgrouptags"; + public static readonly string MachineGroupName = "machinegroupname"; + public static readonly string MachineGroupTags = "machinegrouptags"; + public static readonly string Matrix = "matrix"; + public static readonly string MonitorSocketAddress = "monitorsocketaddress"; + public static readonly string NotificationPipeName = "notificationpipename"; + public static readonly string NotificationSocketAddress = "notificationsocketaddress"; + public static readonly string Pool = "pool"; + public static readonly string ProjectName = "projectname"; + public static readonly string ProxyUrl = "proxyurl"; + public static readonly string ProxyUserName = "proxyusername"; + public static readonly string SslCACert = "sslcacert"; + public static readonly string SslClientCert = "sslclientcert"; + public static readonly string SslClientCertKey = "sslclientcertkey"; + public static readonly string SslClientCertArchive = "sslclientcertarchive"; + public static readonly string SslClientCertPassword = "sslclientcertpassword"; + public static readonly string StartupType = "startuptype"; + public static readonly string Url = "url"; + public 
static readonly string UserName = "username"; + public static readonly string WindowsLogonAccount = "windowslogonaccount"; + public static readonly string Work = "work"; + public static readonly string Yml = "yml"; + + // Secret args. Must be added to the "Secrets" getter as well. + public static readonly string Password = "password"; + public static readonly string ProxyPassword = "proxypassword"; + public static readonly string Token = "token"; + public static readonly string WindowsLogonPassword = "windowslogonpassword"; + public static string[] Secrets => new[] + { + Password, + ProxyPassword, + SslClientCertPassword, + Token, + WindowsLogonPassword, + }; + } + + public static class Commands + { + public static readonly string Configure = "configure"; + public static readonly string LocalRun = "localRun"; + public static readonly string Remove = "remove"; + public static readonly string Run = "run"; + public static readonly string Warmup = "warmup"; + } + + //if you are adding a new flag, please make sure you update the + //validFlags array as well present in the CommandSettings.cs + public static class Flags + { + public static readonly string AcceptTeeEula = "acceptteeeula"; + public static readonly string AddDeploymentGroupTags = "adddeploymentgrouptags"; + public static readonly string AddMachineGroupTags = "addmachinegrouptags"; + public static readonly string Commit = "commit"; + public static readonly string DeploymentGroup = "deploymentgroup"; + public static readonly string DeploymentPool = "deploymentpool"; + public static readonly string OverwriteAutoLogon = "overwriteautologon"; + public static readonly string GitUseSChannel = "gituseschannel"; + public static readonly string Help = "help"; + public static readonly string MachineGroup = "machinegroup"; + public static readonly string Replace = "replace"; + public static readonly string NoRestart = "norestart"; + public static readonly string LaunchBrowser = "launchbrowser"; + public static readonly string Once = "once"; + public static readonly string RunAsAutoLogon = "runasautologon"; + public static readonly string RunAsService = "runasservice"; + public static readonly string SslSkipCertValidation = "sslskipcertvalidation"; + public static readonly string Unattended = "unattended"; + public static readonly string Version = "version"; + public static readonly string WhatIf = "whatif"; + } + } + + public static class ReturnCode + { + public const int Success = 0; + public const int TerminatedError = 1; + public const int RetryableError = 2; + public const int RunnerUpdating = 3; + public const int RunOnceRunnerUpdating = 4; + } + } + + public static class Pipeline + { + public static class Path + { + public static readonly string PipelineMappingDirectory = "_PipelineMapping"; + public static readonly string TrackingConfigFile = "PipelineFolder.json"; + } + } + + public static class Configuration + { + public static readonly string AAD = "AAD"; + public static readonly string OAuthAccessToken = "OAuthAccessToken"; + public static readonly string PAT = "PAT"; + public static readonly string OAuth = "OAuth"; + } + + public static class Expressions + { + public static readonly string Always = "always"; + public static readonly string Canceled = "canceled"; + public static readonly string Cancelled = "cancelled"; + public static readonly string Failed = "failed"; + public static readonly string Failure = "failure"; + public static readonly string Success = "success"; + public static readonly string Succeeded = "succeeded"; + 
public static readonly string SucceededOrFailed = "succeededOrFailed"; + public static readonly string Variables = "variables"; + } + + public static class Path + { + public static readonly string ActionsDirectory = "_actions"; + public static readonly string ActionManifestFile = "action.yml"; + public static readonly string BinDirectory = "bin"; + public static readonly string DiagDirectory = "_diag"; + public static readonly string ExternalsDirectory = "externals"; + public static readonly string RunnerDiagnosticLogPrefix = "Runner_"; + public static readonly string TempDirectory = "_temp"; + public static readonly string TeeDirectory = "tee"; + public static readonly string ToolDirectory = "_tool"; + public static readonly string TaskJsonFile = "task.json"; + public static readonly string UpdateDirectory = "_update"; + public static readonly string WorkDirectory = "_work"; + public static readonly string WorkerDiagnosticLogPrefix = "Worker_"; + } + + // Related to definition variables. + public static class Variables + { + public static readonly string MacroPrefix = "$("; + public static readonly string MacroSuffix = ")"; + + public static class Actions + { + // + // Keep alphabetical + // + public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG"; + public static readonly string StepDebug = "ACTIONS_STEP_DEBUG"; + } + + public static class Agent + { + // + // Keep alphabetical + // + public static readonly string AcceptTeeEula = "agent.acceptteeeula"; + public static readonly string AllowAllEndpoints = "agent.allowAllEndpoints"; // remove after sprint 120 or so. + public static readonly string AllowAllSecureFiles = "agent.allowAllSecureFiles"; // remove after sprint 121 or so. + public static readonly string BuildDirectory = "agent.builddirectory"; + public static readonly string ContainerId = "agent.containerid"; + public static readonly string ContainerNetwork = "agent.containernetwork"; + public static readonly string HomeDirectory = "agent.homedirectory"; + public static readonly string Id = "agent.id"; + public static readonly string GitUseSChannel = "agent.gituseschannel"; + public static readonly string JobName = "agent.jobname"; + public static readonly string MachineName = "agent.machinename"; + public static readonly string Name = "agent.name"; + public static readonly string OS = "agent.os"; + public static readonly string OSArchitecture = "agent.osarchitecture"; + public static readonly string OSVersion = "agent.osversion"; + public static readonly string ProxyUrl = "agent.proxyurl"; + public static readonly string ProxyUsername = "agent.proxyusername"; + public static readonly string ProxyPassword = "agent.proxypassword"; + public static readonly string ProxyBypassList = "agent.proxybypasslist"; + public static readonly string RetainDefaultEncoding = "agent.retainDefaultEncoding"; + public static readonly string RootDirectory = "agent.RootDirectory"; + public static readonly string RunMode = "agent.runmode"; + public static readonly string ServerOMDirectory = "agent.ServerOMDirectory"; + public static readonly string ServicePortPrefix = "agent.services"; + public static readonly string SslCAInfo = "agent.cainfo"; + public static readonly string SslClientCert = "agent.clientcert"; + public static readonly string SslClientCertKey = "agent.clientcertkey"; + public static readonly string SslClientCertArchive = "agent.clientcertarchive"; + public static readonly string SslClientCertPassword = "agent.clientcertpassword"; + public static readonly string 
SslSkipCertValidation = "agent.skipcertvalidation"; + public static readonly string TempDirectory = "agent.TempDirectory"; + public static readonly string ToolsDirectory = "agent.ToolsDirectory"; + public static readonly string Version = "agent.version"; + public static readonly string WorkFolder = "agent.workfolder"; + public static readonly string WorkingDirectory = "agent.WorkingDirectory"; + } + + public static class Build + { + // + // Keep alphabetical + // + public static readonly string ArtifactStagingDirectory = "build.artifactstagingdirectory"; + public static readonly string BinariesDirectory = "build.binariesdirectory"; + public static readonly string Number = "build.buildNumber"; + public static readonly string Clean = "build.clean"; + public static readonly string DefinitionName = "build.definitionname"; + public static readonly string GatedRunCI = "build.gated.runci"; + public static readonly string GatedShelvesetName = "build.gated.shelvesetname"; + public static readonly string RepoClean = "build.repository.clean"; + public static readonly string RepoGitSubmoduleCheckout = "build.repository.git.submodulecheckout"; + public static readonly string RepoId = "build.repository.id"; + public static readonly string RepoLocalPath = "build.repository.localpath"; + public static readonly string RepoName = "build.Repository.name"; + public static readonly string RepoProvider = "build.repository.provider"; + public static readonly string RepoTfvcWorkspace = "build.repository.tfvc.workspace"; + public static readonly string RepoUri = "build.repository.uri"; + public static readonly string SourceBranch = "build.sourcebranch"; + public static readonly string SourceTfvcShelveset = "build.sourcetfvcshelveset"; + public static readonly string SourceVersion = "build.sourceversion"; + public static readonly string SourcesDirectory = "build.sourcesdirectory"; + public static readonly string StagingDirectory = "build.stagingdirectory"; + public static readonly string SyncSources = "build.syncSources"; + } + + + public static class System + { + // + // Keep alphabetical + // + public static readonly string AccessToken = "system.accessToken"; + public static readonly string ArtifactsDirectory = "system.artifactsdirectory"; + public static readonly string CollectionId = "system.collectionid"; + public static readonly string Culture = "system.culture"; + public static readonly string DefaultWorkingDirectory = "system.defaultworkingdirectory"; + public static readonly string DefinitionId = "system.definitionid"; + public static readonly string EnableAccessToken = "system.enableAccessToken"; + public static readonly string HostType = "system.hosttype"; + public static readonly string PhaseDisplayName = "system.phaseDisplayName"; + public static readonly string PreferGitFromPath = "system.prefergitfrompath"; + public static readonly string PullRequestTargetBranchName = "system.pullrequest.targetbranch"; + public static readonly string SelfManageGitCreds = "system.selfmanagegitcreds"; + public static readonly string ServerType = "system.servertype"; + public static readonly string TFServerUrl = "system.TeamFoundationServerUri"; // back compat variable, do not document + public static readonly string TeamProject = "system.teamproject"; + public static readonly string TeamProjectId = "system.teamProjectId"; + public static readonly string WorkFolder = "system.workfolder"; + } + } + } +} diff --git a/src/Runner.Common/CredentialData.cs b/src/Runner.Common/CredentialData.cs new file mode 100644 index 
00000000000..86e93786a6e --- /dev/null +++ b/src/Runner.Common/CredentialData.cs @@ -0,0 +1,24 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.Runner.Common +{ + public sealed class CredentialData + { + public string Scheme { get; set; } + + public Dictionary Data + { + get + { + if (_data == null) + { + _data = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return _data; + } + } + + private Dictionary _data; + } +} diff --git a/src/Runner.Common/Exceptions.cs b/src/Runner.Common/Exceptions.cs new file mode 100644 index 00000000000..83c6edd3fa4 --- /dev/null +++ b/src/Runner.Common/Exceptions.cs @@ -0,0 +1,19 @@ +using System; + +namespace GitHub.Runner.Common +{ + public class NonRetryableException : Exception + { + public NonRetryableException() + : base() + { } + + public NonRetryableException(string message) + : base(message) + { } + + public NonRetryableException(string message, Exception inner) + : base(message, inner) + { } + } +} diff --git a/src/Runner.Common/ExtensionManager.cs b/src/Runner.Common/ExtensionManager.cs new file mode 100644 index 00000000000..dbbb060ae50 --- /dev/null +++ b/src/Runner.Common/ExtensionManager.cs @@ -0,0 +1,80 @@ +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; + +namespace GitHub.Runner.Common +{ + [ServiceLocator(Default = typeof(ExtensionManager))] + public interface IExtensionManager : IRunnerService + { + List GetExtensions() where T : class, IExtension; + } + + public sealed class ExtensionManager : RunnerService, IExtensionManager + { + private readonly ConcurrentDictionary> _cache = new ConcurrentDictionary>(); + + public List GetExtensions() where T : class, IExtension + { + Trace.Info("Getting extensions for interface: '{0}'", typeof(T).FullName); + List extensions = _cache.GetOrAdd( + key: typeof(T), + valueFactory: (Type key) => + { + return LoadExtensions(); + }); + return extensions.Select(x => x as T).ToList(); + } + + // + // We will load extensions from assembly + // once AssemblyLoadContext.Resolving event is able to + // resolve dependency recursively + // + private List LoadExtensions() where T : class, IExtension + { + var extensions = new List(); + switch (typeof(T).FullName) + { + // Listener capabilities providers. + case "GitHub.Runner.Common.Capabilities.ICapabilitiesProvider": + Add(extensions, "GitHub.Runner.Common.Capabilities.RunnerCapabilitiesProvider, Runner.Common"); + break; + // Action command extensions. 
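ExtensionManager above resolves assembly-qualified type names with Type.GetType, instantiates them through Activator.CreateInstance, and caches the resulting list per interface type. A condensed sketch of that mechanism, where MiniExtensionManager and the local IExtension stand-in are illustrative names:

    using System;
    using System.Collections.Concurrent;
    using System.Collections.Generic;
    using System.Linq;

    // Illustrative stand-in for the runner's IExtension / IRunnerService contract.
    interface IExtension { void Initialize(); }

    class MiniExtensionManager
    {
        // One resolved extension list per requested interface type, built lazily.
        private readonly ConcurrentDictionary<Type, List<object>> _cache =
            new ConcurrentDictionary<Type, List<object>>();

        public List<T> Get<T>(params string[] assemblyQualifiedNames) where T : class, IExtension
        {
            List<object> extensions = _cache.GetOrAdd(typeof(T), _ =>
            {
                var list = new List<object>();
                foreach (string name in assemblyQualifiedNames)
                {
                    // e.g. "GitHub.Runner.Worker.SetEnvCommandExtension, Runner.Worker"
                    Type type = Type.GetType(name, throwOnError: true);
                    var extension = (T)Activator.CreateInstance(type);
                    extension.Initialize();
                    list.Add(extension);
                }
                return list;
            });

            return extensions.Cast<T>().ToList();
        }
    }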
+ case "GitHub.Runner.Worker.IActionCommandExtension": + Add(extensions, "GitHub.Runner.Worker.InternalPluginSetRepoPathCommandExtension, Runner.Worker"); + Add(extensions, "GitHub.Runner.Worker.SetEnvCommandExtension, Runner.Worker"); + Add(extensions, "GitHub.Runner.Worker.SetOutputCommandExtension, Runner.Worker"); + Add(extensions, "GitHub.Runner.Worker.SaveStateCommandExtension, Runner.Worker"); + Add(extensions, "GitHub.Runner.Worker.AddPathCommandExtension, Runner.Worker"); + Add(extensions, "GitHub.Runner.Worker.AddMaskCommandExtension, Runner.Worker"); + Add(extensions, "GitHub.Runner.Worker.AddMatcherCommandExtension, Runner.Worker"); + Add(extensions, "GitHub.Runner.Worker.RemoveMatcherCommandExtension, Runner.Worker"); + Add(extensions, "GitHub.Runner.Worker.WarningCommandExtension, Runner.Worker"); + Add(extensions, "GitHub.Runner.Worker.ErrorCommandExtension, Runner.Worker"); + Add(extensions, "GitHub.Runner.Worker.DebugCommandExtension, Runner.Worker"); + Add(extensions, "GitHub.Runner.Worker.GroupCommandExtension, Runner.Worker"); + Add(extensions, "GitHub.Runner.Worker.EndGroupCommandExtension, Runner.Worker"); + break; + default: + // This should never happen. + throw new NotSupportedException($"Unexpected extension type: '{typeof(T).FullName}'"); + } + + return extensions; + } + + private void Add(List extensions, string assemblyQualifiedName) where T : class, IExtension + { + Trace.Info($"Creating instance: {assemblyQualifiedName}"); + Type type = Type.GetType(assemblyQualifiedName, throwOnError: true); + var extension = Activator.CreateInstance(type) as T; + ArgUtil.NotNull(extension, nameof(extension)); + extension.Initialize(HostContext); + extensions.Add(extension); + } + } +} diff --git a/src/Runner.Common/Extensions.cs b/src/Runner.Common/Extensions.cs new file mode 100644 index 00000000000..fce2e6c16e8 --- /dev/null +++ b/src/Runner.Common/Extensions.cs @@ -0,0 +1,30 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace GitHub.Runner.Common +{ + //this code is documented on http://blogs.msdn.com/b/pfxteam/archive/2012/10/05/how-do-i-cancel-non-cancelable-async-operations.aspx + public static class Extensions + { + public static async Task WithCancellation(this Task task, CancellationToken cancellationToken) + { + var tcs = new TaskCompletionSource(); + using (cancellationToken.Register( + s => ((TaskCompletionSource)s).TrySetResult(true), tcs)) + if (task != await Task.WhenAny(task, tcs.Task)) + throw new OperationCanceledException(cancellationToken); + return await task; + } + + public static async Task WithCancellation(this Task task, CancellationToken cancellationToken) + { + var tcs = new TaskCompletionSource(); + using (cancellationToken.Register( + s => ((TaskCompletionSource)s).TrySetResult(true), tcs)) + if (task != await Task.WhenAny(task, tcs.Task)) + throw new OperationCanceledException(cancellationToken); + await task; + } + } +} diff --git a/src/Runner.Common/HostContext.cs b/src/Runner.Common/HostContext.cs new file mode 100644 index 00000000000..8cb8d010371 --- /dev/null +++ b/src/Runner.Common/HostContext.cs @@ -0,0 +1,597 @@ +using GitHub.Runner.Common.Util; +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Runtime.Loader; +using System.Threading; +using System.Threading.Tasks; +using System.Diagnostics; +using System.Net.Http; +using System.Diagnostics.Tracing; +using 
GitHub.DistributedTask.Logging; +using System.Net.Http.Headers; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common +{ + public interface IHostContext : IDisposable + { + RunMode RunMode { get; set; } + StartupType StartupType { get; set; } + CancellationToken RunnerShutdownToken { get; } + ShutdownReason RunnerShutdownReason { get; } + ISecretMasker SecretMasker { get; } + ProductInfoHeaderValue UserAgent { get; } + string GetDirectory(WellKnownDirectory directory); + string GetConfigFile(WellKnownConfigFile configFile); + Tracing GetTrace(string name); + Task Delay(TimeSpan delay, CancellationToken cancellationToken); + T CreateService() where T : class, IRunnerService; + T GetService() where T : class, IRunnerService; + void SetDefaultCulture(string name); + event EventHandler Unloading; + void ShutdownRunner(ShutdownReason reason); + void WritePerfCounter(string counter); + } + + public enum StartupType + { + Manual, + Service, + AutoStartup + } + + public sealed class HostContext : EventListener, IObserver, IObserver>, IHostContext, IDisposable + { + private const int _defaultLogPageSize = 8; //MB + private static int _defaultLogRetentionDays = 30; + private static int[] _vssHttpMethodEventIds = new int[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 24 }; + private static int[] _vssHttpCredentialEventIds = new int[] { 11, 13, 14, 15, 16, 17, 18, 20, 21, 22, 27, 29 }; + private readonly ConcurrentDictionary _serviceInstances = new ConcurrentDictionary(); + private readonly ConcurrentDictionary _serviceTypes = new ConcurrentDictionary(); + private readonly ISecretMasker _secretMasker = new SecretMasker(); + private readonly ProductInfoHeaderValue _userAgent = new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version); + private CancellationTokenSource _runnerShutdownTokenSource = new CancellationTokenSource(); + private object _perfLock = new object(); + private RunMode _runMode = RunMode.Normal; + private Tracing _trace; + private Tracing _vssTrace; + private Tracing _httpTrace; + private ITraceManager _traceManager; + private AssemblyLoadContext _loadContext; + private IDisposable _httpTraceSubscription; + private IDisposable _diagListenerSubscription; + private StartupType _startupType; + private string _perfFile; + + public event EventHandler Unloading; + public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token; + public ShutdownReason RunnerShutdownReason { get; private set; } + public ISecretMasker SecretMasker => _secretMasker; + public ProductInfoHeaderValue UserAgent => _userAgent; + public HostContext(string hostType, string logFile = null) + { + // Validate args. 
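The WithCancellation extension methods in Extensions.cs above race the real task against a TaskCompletionSource completed by the cancellation token, which is how the runner abandons operations that do not accept a token themselves. A self-contained sketch of the pattern (CancellationDemo and the timings are illustrative):

    using System;
    using System.Threading;
    using System.Threading.Tasks;

    static class CancellationDemo
    {
        // Race the real task against a TaskCompletionSource completed by the token
        // registration, so even a task that ignores cancellation can be abandoned.
        public static async Task<T> WithCancellation<T>(this Task<T> task, CancellationToken token)
        {
            var tcs = new TaskCompletionSource<bool>();
            using (token.Register(s => ((TaskCompletionSource<bool>)s).TrySetResult(true), tcs))
            {
                if (task != await Task.WhenAny(task, tcs.Task))
                {
                    throw new OperationCanceledException(token);
                }
            }

            return await task;  // propagate the result or the original exception
        }

        static async Task Main()
        {
            var never = new TaskCompletionSource<int>().Task;  // stands in for a non-cancelable operation
            using (var cts = new CancellationTokenSource(TimeSpan.FromMilliseconds(200)))
            {
                try
                {
                    await never.WithCancellation(cts.Token);
                }
                catch (OperationCanceledException)
                {
                    Console.WriteLine("abandoned after 200ms");
                }
            }
        }
    }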
+ ArgUtil.NotNullOrEmpty(hostType, nameof(hostType)); + + _loadContext = AssemblyLoadContext.GetLoadContext(typeof(HostContext).GetTypeInfo().Assembly); + _loadContext.Unloading += LoadContext_Unloading; + + this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscape); + this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift1); + this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift2); + this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift3); + this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift4); + this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift5); + this.SecretMasker.AddValueEncoder(ValueEncoders.ExpressionStringEscape); + this.SecretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape); + this.SecretMasker.AddValueEncoder(ValueEncoders.UriDataEscape); + this.SecretMasker.AddValueEncoder(ValueEncoders.XmlDataEscape); + + // Create the trace manager. + if (string.IsNullOrEmpty(logFile)) + { + int logPageSize; + string logSizeEnv = Environment.GetEnvironmentVariable($"{hostType.ToUpperInvariant()}_LOGSIZE"); + if (!string.IsNullOrEmpty(logSizeEnv) || !int.TryParse(logSizeEnv, out logPageSize)) + { + logPageSize = _defaultLogPageSize; + } + + int logRetentionDays; + string logRetentionDaysEnv = Environment.GetEnvironmentVariable($"{hostType.ToUpperInvariant()}_LOGRETENTION"); + if (!string.IsNullOrEmpty(logRetentionDaysEnv) || !int.TryParse(logRetentionDaysEnv, out logRetentionDays)) + { + logRetentionDays = _defaultLogRetentionDays; + } + + // this should give us _diag folder under runner root directory + string diagLogDirectory = Path.Combine(new DirectoryInfo(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location)).Parent.FullName, Constants.Path.DiagDirectory); + _traceManager = new TraceManager(new HostTraceListener(diagLogDirectory, hostType, logPageSize, logRetentionDays), this.SecretMasker); + } + else + { + _traceManager = new TraceManager(new HostTraceListener(logFile), this.SecretMasker); + } + + _trace = GetTrace(nameof(HostContext)); + _vssTrace = GetTrace("GitHubActionsRunner"); // VisualStudioService + + // Enable Http trace + bool enableHttpTrace; + if (bool.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTPTRACE"), out enableHttpTrace) && enableHttpTrace) + { + _trace.Warning("*****************************************************************************************"); + _trace.Warning("** **"); + _trace.Warning("** Http trace is enabled, all your http traffic will be dumped into runner diag log. **"); + _trace.Warning("** DO NOT share the log in public place! The trace may contains secrets in plain text. 
**"); + _trace.Warning("** **"); + _trace.Warning("*****************************************************************************************"); + + _httpTrace = GetTrace("HttpTrace"); + _diagListenerSubscription = DiagnosticListener.AllListeners.Subscribe(this); + } + + // Enable perf counter trace + string perfCounterLocation = Environment.GetEnvironmentVariable("RUNNER_PERFLOG"); + if (!string.IsNullOrEmpty(perfCounterLocation)) + { + try + { + Directory.CreateDirectory(perfCounterLocation); + _perfFile = Path.Combine(perfCounterLocation, $"{hostType}.perf"); + } + catch (Exception ex) + { + _trace.Error(ex); + } + } + } + + public RunMode RunMode + { + get + { + return _runMode; + } + + set + { + _trace.Info($"Set run mode: {value}"); + _runMode = value; + } + } + + public string GetDirectory(WellKnownDirectory directory) + { + string path; + switch (directory) + { + case WellKnownDirectory.Bin: + path = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location); + break; + + case WellKnownDirectory.Diag: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + Constants.Path.DiagDirectory); + break; + + case WellKnownDirectory.Externals: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + Constants.Path.ExternalsDirectory); + break; + + case WellKnownDirectory.Root: + path = new DirectoryInfo(GetDirectory(WellKnownDirectory.Bin)).Parent.FullName; + break; + + case WellKnownDirectory.Temp: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Work), + Constants.Path.TempDirectory); + break; + + case WellKnownDirectory.Actions: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Work), + Constants.Path.ActionsDirectory); + break; + + case WellKnownDirectory.Tools: + // TODO: Coallesce to just check RUNNER_TOOL_CACHE when images stabilize + path = Environment.GetEnvironmentVariable("RUNNER_TOOL_CACHE") ?? Environment.GetEnvironmentVariable("RUNNER_TOOLSDIRECTORY") ?? Environment.GetEnvironmentVariable("AGENT_TOOLSDIRECTORY") ?? 
Environment.GetEnvironmentVariable(Constants.Variables.Agent.ToolsDirectory); + if (string.IsNullOrEmpty(path)) + { + path = Path.Combine( + GetDirectory(WellKnownDirectory.Work), + Constants.Path.ToolDirectory); + } + break; + + case WellKnownDirectory.Update: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Work), + Constants.Path.UpdateDirectory); + break; + + case WellKnownDirectory.Work: + var configurationStore = GetService(); + RunnerSettings settings = configurationStore.GetSettings(); + ArgUtil.NotNull(settings, nameof(settings)); + ArgUtil.NotNullOrEmpty(settings.WorkFolder, nameof(settings.WorkFolder)); + path = Path.GetFullPath(Path.Combine( + GetDirectory(WellKnownDirectory.Root), + settings.WorkFolder)); + break; + + default: + throw new NotSupportedException($"Unexpected well known directory: '{directory}'"); + } + + _trace.Info($"Well known directory '{directory}': '{path}'"); + return path; + } + + public string GetConfigFile(WellKnownConfigFile configFile) + { + string path; + switch (configFile) + { + case WellKnownConfigFile.Runner: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".runner"); + break; + + case WellKnownConfigFile.Credentials: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".credentials"); + break; + + case WellKnownConfigFile.RSACredentials: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".credentials_rsaparams"); + break; + + case WellKnownConfigFile.Service: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".service"); + break; + + case WellKnownConfigFile.CredentialStore: +#if OS_OSX + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".credential_store.keychain"); +#else + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".credential_store"); +#endif + break; + + case WellKnownConfigFile.Certificates: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".certificates"); + break; + + case WellKnownConfigFile.Proxy: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".proxy"); + break; + + case WellKnownConfigFile.ProxyCredentials: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".proxycredentials"); + break; + + case WellKnownConfigFile.ProxyBypass: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".proxybypass"); + break; + + case WellKnownConfigFile.Options: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".options"); + break; + default: + throw new NotSupportedException($"Unexpected well known config file: '{configFile}'"); + } + + _trace.Info($"Well known config file '{configFile}': '{path}'"); + return path; + } + + public Tracing GetTrace(string name) + { + return _traceManager[name]; + } + + public async Task Delay(TimeSpan delay, CancellationToken cancellationToken) + { + await Task.Delay(delay, cancellationToken); + } + + /// + /// Creates a new instance of T. + /// + public T CreateService() where T : class, IRunnerService + { + Type target; + if (!_serviceTypes.TryGetValue(typeof(T), out target)) + { + // Infer the concrete type from the ServiceLocatorAttribute. 
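CreateService<T>() above infers a concrete type from the ServiceLocatorAttribute placed on the service interface and instantiates it with Activator.CreateInstance. A compact sketch of that attribute-driven lookup, using GetCustomAttribute instead of walking CustomAttributeData and with a made-up greeting service as the example:

    using System;
    using System.Reflection;

    [AttributeUsage(AttributeTargets.Interface)]
    sealed class ServiceLocatorAttribute : Attribute
    {
        public Type Default { get; set; }
    }

    [ServiceLocator(Default = typeof(GreetingService))]
    interface IGreetingService { string Greet(string name); }

    sealed class GreetingService : IGreetingService
    {
        public string Greet(string name) => $"Hello, {name}";
    }

    static class ServiceFactory
    {
        public static T Create<T>() where T : class
        {
            // Read the default implementation type from the interface's attribute.
            var attribute = typeof(T).GetCustomAttribute<ServiceLocatorAttribute>();
            if (attribute?.Default == null)
            {
                throw new InvalidOperationException($"No service mapping for '{typeof(T).FullName}'.");
            }

            return (T)Activator.CreateInstance(attribute.Default);
        }

        static void Main()
        {
            IGreetingService svc = Create<IGreetingService>();
            Console.WriteLine(svc.Greet("runner"));  // Hello, runner
        }
    }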
+ CustomAttributeData attribute = typeof(T) + .GetTypeInfo() + .CustomAttributes + .FirstOrDefault(x => x.AttributeType == typeof(ServiceLocatorAttribute)); + if (attribute != null) + { + foreach (CustomAttributeNamedArgument arg in attribute.NamedArguments) + { + if (string.Equals(arg.MemberName, ServiceLocatorAttribute.DefaultPropertyName, StringComparison.Ordinal)) + { + target = arg.TypedValue.Value as Type; + } + } + } + + if (target == null) + { + throw new KeyNotFoundException(string.Format(CultureInfo.InvariantCulture, "Service mapping not found for key '{0}'.", typeof(T).FullName)); + } + + _serviceTypes.TryAdd(typeof(T), target); + target = _serviceTypes[typeof(T)]; + } + + // Create a new instance. + T svc = Activator.CreateInstance(target) as T; + svc.Initialize(this); + return svc; + } + + /// + /// Gets or creates an instance of T. + /// + public T GetService() where T : class, IRunnerService + { + // Return the cached instance if one already exists. + object instance; + if (_serviceInstances.TryGetValue(typeof(T), out instance)) + { + return instance as T; + } + + // Otherwise create a new instance and try to add it to the cache. + _serviceInstances.TryAdd(typeof(T), CreateService()); + + // Return the instance from the cache. + return _serviceInstances[typeof(T)] as T; + } + + public void SetDefaultCulture(string name) + { + ArgUtil.NotNull(name, nameof(name)); + _trace.Verbose($"Setting default culture and UI culture to: '{name}'"); + CultureInfo.DefaultThreadCurrentCulture = new CultureInfo(name); + CultureInfo.DefaultThreadCurrentUICulture = new CultureInfo(name); + } + + + public void ShutdownRunner(ShutdownReason reason) + { + ArgUtil.NotNull(reason, nameof(reason)); + _trace.Info($"Runner will be shutdown for {reason.ToString()}"); + RunnerShutdownReason = reason; + _runnerShutdownTokenSource.Cancel(); + } + + public override void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + public StartupType StartupType + { + get + { + return _startupType; + } + set + { + _startupType = value; + } + } + + public void WritePerfCounter(string counter) + { + if (!string.IsNullOrEmpty(_perfFile)) + { + string normalizedCounter = counter.Replace(':', '_'); + lock (_perfLock) + { + try + { + File.AppendAllLines(_perfFile, new[] { $"{normalizedCounter}:{DateTime.UtcNow.ToString("O")}" }); + } + catch (Exception ex) + { + _trace.Error(ex); + } + } + } + } + + private void Dispose(bool disposing) + { + // TODO: Dispose the trace listener also. 
+ if (disposing) + { + if (_loadContext != null) + { + _loadContext.Unloading -= LoadContext_Unloading; + _loadContext = null; + } + _httpTraceSubscription?.Dispose(); + _diagListenerSubscription?.Dispose(); + _traceManager?.Dispose(); + _traceManager = null; + + _runnerShutdownTokenSource?.Dispose(); + _runnerShutdownTokenSource = null; + + base.Dispose(); + } + } + + private void LoadContext_Unloading(AssemblyLoadContext obj) + { + if (Unloading != null) + { + Unloading(this, null); + } + } + + void IObserver.OnCompleted() + { + _httpTrace.Info("DiagListeners finished transmitting data."); + } + + void IObserver.OnError(Exception error) + { + _httpTrace.Error(error); + } + + void IObserver.OnNext(DiagnosticListener listener) + { + if (listener.Name == "HttpHandlerDiagnosticListener" && _httpTraceSubscription == null) + { + _httpTraceSubscription = listener.Subscribe(this); + } + } + + void IObserver>.OnCompleted() + { + _httpTrace.Info("HttpHandlerDiagnosticListener finished transmitting data."); + } + + void IObserver>.OnError(Exception error) + { + _httpTrace.Error(error); + } + + void IObserver>.OnNext(KeyValuePair value) + { + _httpTrace.Info($"Trace {value.Key} event:{Environment.NewLine}{value.Value.ToString()}"); + } + + protected override void OnEventSourceCreated(EventSource source) + { + if (source.Name.Equals("Microsoft-VSS-Http")) + { + EnableEvents(source, EventLevel.Verbose); + } + } + + protected override void OnEventWritten(EventWrittenEventArgs eventData) + { + if (eventData == null) + { + return; + } + + string message = eventData.Message; + object[] payload = new object[0]; + if (eventData.Payload != null && eventData.Payload.Count > 0) + { + payload = eventData.Payload.ToArray(); + } + + try + { + if (_vssHttpMethodEventIds.Contains(eventData.EventId)) + { + payload[0] = Enum.Parse(typeof(VssHttpMethod), ((int)payload[0]).ToString()); + } + else if (_vssHttpCredentialEventIds.Contains(eventData.EventId)) + { + payload[0] = Enum.Parse(typeof(GitHub.Services.Common.VssCredentialsType), ((int)payload[0]).ToString()); + } + + if (payload.Length > 0) + { + message = String.Format(eventData.Message.Replace("%n", Environment.NewLine), payload); + } + + switch (eventData.Level) + { + case EventLevel.Critical: + case EventLevel.Error: + _vssTrace.Error(message); + break; + case EventLevel.Warning: + _vssTrace.Warning(message); + break; + case EventLevel.Informational: + _vssTrace.Info(message); + break; + default: + _vssTrace.Verbose(message); + break; + } + } + catch (Exception ex) + { + _vssTrace.Error(ex); + _vssTrace.Info(eventData.Message); + _vssTrace.Info(string.Join(", ", eventData.Payload?.ToArray() ?? new string[0])); + } + } + + // Copied from pipelines server code base, used for EventData translation. 
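HostContext above also acts as an EventListener: it enables verbose events for the "Microsoft-VSS-Http" source and routes each event by level into its trace files. A minimal console version of that hook-up is sketched below; Demo-Source, ConsoleEventListener, and EventDemo are illustrative, not names from the runner.

    using System;
    using System.Diagnostics.Tracing;

    sealed class ConsoleEventListener : EventListener
    {
        // Enable a source by name as soon as it is created, then route events by level.
        protected override void OnEventSourceCreated(EventSource source)
        {
            if (source.Name.Equals("Demo-Source"))
            {
                EnableEvents(source, EventLevel.Verbose);
            }
        }

        protected override void OnEventWritten(EventWrittenEventArgs eventData)
        {
            string payload = eventData.Payload == null ? "" : string.Join(", ", eventData.Payload);
            switch (eventData.Level)
            {
                case EventLevel.Critical:
                case EventLevel.Error:
                    Console.Error.WriteLine($"[ERR ] {eventData.Message} | {payload}");
                    break;
                case EventLevel.Warning:
                    Console.WriteLine($"[WARN] {eventData.Message} | {payload}");
                    break;
                default:
                    Console.WriteLine($"[INFO] {eventData.Message} | {payload}");
                    break;
            }
        }
    }

    [EventSource(Name = "Demo-Source")]
    sealed class DemoEventSource : EventSource
    {
        public static readonly DemoEventSource Log = new DemoEventSource();

        [Event(1, Level = EventLevel.Informational, Message = "request to {0}")]
        public void Request(string url) => WriteEvent(1, url);
    }

    static class EventDemo
    {
        static void Main()
        {
            using (new ConsoleEventListener())
            {
                DemoEventSource.Log.Request("https://example.com");
            }
        }
    }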
+ internal enum VssHttpMethod + { + UNKNOWN, + DELETE, + HEAD, + GET, + OPTIONS, + PATCH, + POST, + PUT, + } + } + + public static class HostContextExtension + { + public static HttpClientHandler CreateHttpClientHandler(this IHostContext context) + { + HttpClientHandler clientHandler = new HttpClientHandler(); + var runnerWebProxy = context.GetService(); + clientHandler.Proxy = runnerWebProxy.WebProxy; + return clientHandler; + } + } + + public enum ShutdownReason + { + UserCancelled = 0, + OperatingSystemShutdown = 1, + } +} diff --git a/src/Runner.Common/HostTraceListener.cs b/src/Runner.Common/HostTraceListener.cs new file mode 100644 index 00000000000..e11edc5b492 --- /dev/null +++ b/src/Runner.Common/HostTraceListener.cs @@ -0,0 +1,202 @@ +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System; +using System.Diagnostics; +using System.Globalization; +using System.IO; +using System.Text; + +namespace GitHub.Runner.Common +{ + public sealed class HostTraceListener : TextWriterTraceListener + { + private const string _logFileNamingPattern = "{0}_{1:yyyyMMdd-HHmmss}-utc.log"; + private string _logFileDirectory; + private string _logFilePrefix; + private bool _enablePageLog = false; + private bool _enableLogRetention = false; + private int _currentPageSize; + private int _pageSizeLimit; + private int _retentionDays; + + public HostTraceListener(string logFileDirectory, string logFilePrefix, int pageSizeLimit, int retentionDays) + : base() + { + ArgUtil.NotNullOrEmpty(logFileDirectory, nameof(logFileDirectory)); + ArgUtil.NotNullOrEmpty(logFilePrefix, nameof(logFilePrefix)); + _logFileDirectory = logFileDirectory; + _logFilePrefix = logFilePrefix; + + Directory.CreateDirectory(_logFileDirectory); + + if (pageSizeLimit > 0) + { + _enablePageLog = true; + _pageSizeLimit = pageSizeLimit * 1024 * 1024; + _currentPageSize = 0; + } + + if (retentionDays > 0) + { + _enableLogRetention = true; + _retentionDays = retentionDays; + } + + Writer = CreatePageLogWriter(); + } + + public HostTraceListener(string logFile) + : base() + { + ArgUtil.NotNullOrEmpty(logFile, nameof(logFile)); + Directory.CreateDirectory(Path.GetDirectoryName(logFile)); + Stream logStream = new FileStream(logFile, FileMode.Create, FileAccess.ReadWrite, FileShare.Read, bufferSize: 4096); + Writer = new StreamWriter(logStream); + } + + // Copied and modified slightly from .Net Core source code. Modification was required to make it compile. + // There must be some TraceFilter extension class that is missing in this source code. 
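CreateHttpClientHandler above pushes the runner's proxy settings into every HttpClient via HttpClientHandler.Proxy. A small sketch of the same wiring, with a placeholder proxy URL and credentials that do not come from the runner:

    using System;
    using System.Net;
    using System.Net.Http;

    static class ProxyHandlerDemo
    {
        // The proxy URL and credentials below are placeholders, not runner values.
        static HttpClientHandler CreateHandler(Uri proxyUri)
        {
            return new HttpClientHandler
            {
                Proxy = new WebProxy(proxyUri)
                {
                    Credentials = new NetworkCredential("proxy-user", "proxy-password"),
                },
            };
        }

        static void Main()
        {
            var handler = CreateHandler(new Uri("http://127.0.0.1:8888"));
            using (var client = new HttpClient(handler))
            {
                // Every request issued by this client would be routed through the proxy.
                Console.WriteLine($"Proxy configured: {((WebProxy)handler.Proxy).Address}");
            }
        }
    }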
+ public override void TraceEvent(TraceEventCache eventCache, string source, TraceEventType eventType, int id, string message) + { + if (Filter != null && !Filter.ShouldTrace(eventCache, source, eventType, id, message, null, null, null)) + { + return; + } + + WriteHeader(source, eventType, id); + WriteLine(message); + WriteFooter(eventCache); + } + + public override void WriteLine(string message) + { + base.WriteLine(message); + if (_enablePageLog) + { + int messageSize = UTF8Encoding.UTF8.GetByteCount(message); + _currentPageSize += messageSize; + if (_currentPageSize > _pageSizeLimit) + { + Flush(); + if (Writer != null) + { + Writer.Dispose(); + Writer = null; + } + + Writer = CreatePageLogWriter(); + _currentPageSize = 0; + } + } + + Flush(); + } + + public override void Write(string message) + { + base.Write(message); + if (_enablePageLog) + { + int messageSize = UTF8Encoding.UTF8.GetByteCount(message); + _currentPageSize += messageSize; + } + + Flush(); + } + + internal bool IsEnabled(TraceOptions opts) + { + return (opts & TraceOutputOptions) != 0; + } + + // Altered from the original .Net Core implementation. + private void WriteHeader(string source, TraceEventType eventType, int id) + { + string type = null; + switch (eventType) + { + case TraceEventType.Critical: + type = "CRIT"; + break; + case TraceEventType.Error: + type = "ERR "; + break; + case TraceEventType.Warning: + type = "WARN"; + break; + case TraceEventType.Information: + type = "INFO"; + break; + case TraceEventType.Verbose: + type = "VERB"; + break; + default: + type = eventType.ToString(); + break; + } + + Write(StringUtil.Format("[{0:u} {1} {2}] ", DateTime.UtcNow, type, source)); + } + + // Copied and modified slightly from .Net Core source code to make it compile. The original code + // accesses a private indentLevel field. In this code it has been modified to use the getter/setter. + private void WriteFooter(TraceEventCache eventCache) + { + if (eventCache == null) + return; + + IndentLevel++; + if (IsEnabled(TraceOptions.ProcessId)) + WriteLine("ProcessId=" + eventCache.ProcessId); + + if (IsEnabled(TraceOptions.ThreadId)) + WriteLine("ThreadId=" + eventCache.ThreadId); + + if (IsEnabled(TraceOptions.DateTime)) + WriteLine("DateTime=" + eventCache.DateTime.ToString("o", CultureInfo.InvariantCulture)); + + if (IsEnabled(TraceOptions.Timestamp)) + WriteLine("Timestamp=" + eventCache.Timestamp); + + IndentLevel--; + } + + private StreamWriter CreatePageLogWriter() + { + if (_enableLogRetention) + { + DirectoryInfo diags = new DirectoryInfo(_logFileDirectory); + var logs = diags.GetFiles($"{_logFilePrefix}*.log"); + foreach (var log in logs) + { + if (log.LastWriteTimeUtc.AddDays(_retentionDays) < DateTime.UtcNow) + { + try + { + log.Delete(); + } + catch (Exception) + { + // catch Exception and continue + // we shouldn't block logging and fail the runner if the runner can't delete an older log file. 
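CreatePageLogWriter above prunes old page logs before opening a new one, and deliberately swallows delete failures so logging can never take down the runner. A standalone sketch of that retention sweep (the directory, prefix, and 30-day window are illustrative):

    using System;
    using System.IO;

    static class LogRetention
    {
        // Delete page-log files older than the retention window; never let a failed
        // delete escape, because logging must not fail the host process.
        static void Sweep(string directory, string prefix, int retentionDays)
        {
            var diag = new DirectoryInfo(directory);
            if (!diag.Exists)
            {
                return;
            }

            foreach (FileInfo log in diag.GetFiles($"{prefix}*.log"))
            {
                if (log.LastWriteTimeUtc.AddDays(retentionDays) < DateTime.UtcNow)
                {
                    try
                    {
                        log.Delete();
                    }
                    catch (Exception)
                    {
                        // Swallow and continue with the next file.
                    }
                }
            }
        }

        static void Main()
        {
            Sweep(Path.Combine(Environment.CurrentDirectory, "_diag"), "Runner_", retentionDays: 30);
        }
    }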
+ } + } + } + } + + string fileName = StringUtil.Format(_logFileNamingPattern, _logFilePrefix, DateTime.UtcNow); + string logFile = Path.Combine(_logFileDirectory, fileName); + Stream logStream; + if (File.Exists(logFile)) + { + logStream = new FileStream(logFile, FileMode.Append, FileAccess.Write, FileShare.Read, bufferSize: 4096); + } + else + { + logStream = new FileStream(logFile, FileMode.Create, FileAccess.ReadWrite, FileShare.Read, bufferSize: 4096); + } + + return new StreamWriter(logStream); + } + } +} diff --git a/src/Runner.Common/IExtension.cs b/src/Runner.Common/IExtension.cs new file mode 100644 index 00000000000..23c5158a601 --- /dev/null +++ b/src/Runner.Common/IExtension.cs @@ -0,0 +1,9 @@ +using System; + +namespace GitHub.Runner.Common +{ + public interface IExtension : IRunnerService + { + Type ExtensionType { get; } + } +} diff --git a/src/Runner.Common/JobNotification.cs b/src/Runner.Common/JobNotification.cs new file mode 100644 index 00000000000..e7756aaa17d --- /dev/null +++ b/src/Runner.Common/JobNotification.cs @@ -0,0 +1,296 @@ +using System; +using System.IO; +using System.IO.Pipes; +using System.Net; +using System.Net.Sockets; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace GitHub.Runner.Common +{ + [ServiceLocator(Default = typeof(JobNotification))] + public interface IJobNotification : IRunnerService, IDisposable + { + Task JobStarted(Guid jobId, string accessToken, Uri serverUrl); + Task JobCompleted(Guid jobId); + void StartClient(string pipeName, string monitorSocketAddress, CancellationToken cancellationToken); + void StartClient(string socketAddress, string monitorSocketAddress); + } + + public sealed class JobNotification : RunnerService, IJobNotification + { + private NamedPipeClientStream _outClient; + private StreamWriter _writeStream; + private Socket _socket; + private Socket _monitorSocket; + private bool _configured = false; + private bool _useSockets = false; + private bool _isMonitorConfigured = false; + + public async Task JobStarted(Guid jobId, string accessToken, Uri serverUrl) + { + Trace.Info("Entering JobStarted Notification"); + + StartMonitor(jobId, accessToken, serverUrl); + + if (_configured) + { + String message = $"Starting job: {jobId.ToString()}"; + if (_useSockets) + { + try + { + Trace.Info("Writing JobStarted to socket"); + _socket.Send(Encoding.UTF8.GetBytes(message)); + Trace.Info("Finished JobStarted writing to socket"); + } + catch (SocketException e) + { + Trace.Error($"Failed sending message \"{message}\" on socket!"); + Trace.Error(e); + } + } + else + { + Trace.Info("Writing JobStarted to pipe"); + await _writeStream.WriteLineAsync(message); + await _writeStream.FlushAsync(); + Trace.Info("Finished JobStarted writing to pipe"); + } + } + } + + public async Task JobCompleted(Guid jobId) + { + Trace.Info("Entering JobCompleted Notification"); + + await EndMonitor(); + + if (_configured) + { + String message = $"Finished job: {jobId.ToString()}"; + if (_useSockets) + { + try + { + Trace.Info("Writing JobCompleted to socket"); + _socket.Send(Encoding.UTF8.GetBytes(message)); + Trace.Info("Finished JobCompleted writing to socket"); + } + catch (SocketException e) + { + Trace.Error($"Failed sending message \"{message}\" on socket!"); + Trace.Error(e); + } + } + else + { + Trace.Info("Writing JobCompleted to pipe"); + await _writeStream.WriteLineAsync(message); + await _writeStream.FlushAsync(); + Trace.Info("Finished JobCompleted writing to pipe"); + } + } + } + + public async void 
StartClient(string pipeName, string monitorSocketAddress, CancellationToken cancellationToken) + { + if (pipeName != null && !_configured) + { + Trace.Info("Connecting to named pipe {0}", pipeName); + _outClient = new NamedPipeClientStream(".", pipeName, PipeDirection.Out, PipeOptions.Asynchronous); + await _outClient.ConnectAsync(cancellationToken); + _writeStream = new StreamWriter(_outClient, Encoding.UTF8); + _configured = true; + Trace.Info("Connection successful to named pipe {0}", pipeName); + } + + ConnectMonitor(monitorSocketAddress); + } + + public void StartClient(string socketAddress, string monitorSocketAddress) + { + if (!_configured) + { + try + { + string[] splitAddress = socketAddress.Split(':'); + if (splitAddress.Length != 2) + { + Trace.Error("Invalid socket address {0}. Job Notification will be disabled.", socketAddress); + return; + } + + IPAddress address; + try + { + address = IPAddress.Parse(splitAddress[0]); + } + catch (FormatException e) + { + Trace.Error("Invalid socket ip address {0}. Job Notification will be disabled",splitAddress[0]); + Trace.Error(e); + return; + } + + int port = -1; + Int32.TryParse(splitAddress[1], out port); + if (port < IPEndPoint.MinPort || port > IPEndPoint.MaxPort) + { + Trace.Error("Invalid tcp socket port {0}. Job Notification will be disabled.", splitAddress[1]); + return; + } + + _socket = new Socket(SocketType.Stream, ProtocolType.Tcp); + _socket.Connect(address, port); + Trace.Info("Connection successful to socket {0}", socketAddress); + _useSockets = true; + _configured = true; + } + catch (SocketException e) + { + Trace.Error("Connection to socket {0} failed!", socketAddress); + Trace.Error(e); + } + } + + ConnectMonitor(monitorSocketAddress); + } + + private void StartMonitor(Guid jobId, string accessToken, Uri serverUri) + { + if(String.IsNullOrEmpty(accessToken)) + { + Trace.Info("No access token could be retrieved to start the monitor."); + return; + } + + try + { + Trace.Info("Entering StartMonitor"); + if (_isMonitorConfigured) + { + String message = $"Start {jobId.ToString()} {accessToken} {serverUri.ToString()} {System.Diagnostics.Process.GetCurrentProcess().Id}"; + + Trace.Info("Writing StartMonitor to socket"); + _monitorSocket.Send(Encoding.UTF8.GetBytes(message)); + Trace.Info("Finished StartMonitor writing to socket"); + } + } + catch (SocketException e) + { + Trace.Error($"Failed sending StartMonitor message on socket!"); + Trace.Error(e); + } + catch (Exception e) + { + Trace.Error($"Unexpected error occurred while sending StartMonitor message on socket!"); + Trace.Error(e); + } + } + + private async Task EndMonitor() + { + try + { + Trace.Info("Entering EndMonitor"); + if (_isMonitorConfigured) + { + String message = $"End {System.Diagnostics.Process.GetCurrentProcess().Id}"; + Trace.Info("Writing EndMonitor to socket"); + _monitorSocket.Send(Encoding.UTF8.GetBytes(message)); + Trace.Info("Finished EndMonitor writing to socket"); + + await Task.Delay(TimeSpan.FromSeconds(2)); + } + } + catch (SocketException e) + { + Trace.Error($"Failed sending end message on socket!"); + Trace.Error(e); + } + catch (Exception e) + { + Trace.Error($"Unexpected error occurred while sending StartMonitor message on socket!"); + Trace.Error(e); + } + } + + private void ConnectMonitor(string monitorSocketAddress) + { + int port = -1; + if (!_isMonitorConfigured && !String.IsNullOrEmpty(monitorSocketAddress)) + { + try + { + string[] splitAddress = monitorSocketAddress.Split(':'); + if (splitAddress.Length != 2) + { + 
Trace.Error("Invalid socket address {0}. Unable to connect to monitor.", monitorSocketAddress); + return; + } + + IPAddress address; + try + { + address = IPAddress.Parse(splitAddress[0]); + } + catch (FormatException e) + { + Trace.Error("Invalid socket IP address {0}. Unable to connect to monitor.", splitAddress[0]); + Trace.Error(e); + return; + } + + Int32.TryParse(splitAddress[1], out port); + if (port < IPEndPoint.MinPort || port > IPEndPoint.MaxPort) + { + Trace.Error("Invalid TCP socket port {0}. Unable to connect to monitor.", splitAddress[1]); + return; + } + + + Trace.Verbose("Trying to connect to monitor at port {0}", port); + _monitorSocket = new Socket(SocketType.Stream, ProtocolType.Tcp); + _monitorSocket.Connect(address, port); + Trace.Info("Connection successful to local port {0}", port); + _isMonitorConfigured = true; + } + catch (Exception e) + { + Trace.Error("Connection to monitor port {0} failed!", port); + Trace.Error(e); + } + } + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + _outClient?.Dispose(); + + if (_socket != null) + { + _socket.Send(Encoding.UTF8.GetBytes("")); + _socket.Shutdown(SocketShutdown.Both); + _socket = null; + } + + if (_monitorSocket != null) + { + _monitorSocket.Send(Encoding.UTF8.GetBytes("")); + _monitorSocket.Shutdown(SocketShutdown.Both); + _monitorSocket = null; + } + } + } + } +} diff --git a/src/Runner.Common/JobServer.cs b/src/Runner.Common/JobServer.cs new file mode 100644 index 00000000000..d7576199e81 --- /dev/null +++ b/src/Runner.Common/JobServer.cs @@ -0,0 +1,162 @@ +using GitHub.DistributedTask.WebApi; +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.WebApi; + +namespace GitHub.Runner.Common +{ + [ServiceLocator(Default = typeof(JobServer))] + public interface IJobServer : IRunnerService + { + Task ConnectAsync(VssConnection jobConnection); + + // logging and console + Task AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken); + Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList lines, CancellationToken cancellationToken); + Task CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken); + Task CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken); + Task CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken); + Task> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable records, CancellationToken cancellationToken); + Task RaisePlanEventAsync(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent; + Task GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken); + } + + public sealed class JobServer : RunnerService, IJobServer + { + private bool _hasConnection; + private VssConnection _connection; + private TaskHttpClient _taskClient; + + public async Task ConnectAsync(VssConnection jobConnection) + { + if 
(HostContext.RunMode == RunMode.Local) + { + return; + } + + _connection = jobConnection; + int attemptCount = 5; + while (!_connection.HasAuthenticated && attemptCount-- > 0) + { + try + { + await _connection.ConnectAsync(); + break; + } + catch (Exception ex) when (attemptCount > 0) + { + Trace.Info($"Catch exception during connect. {attemptCount} attemp left."); + Trace.Error(ex); + } + + await Task.Delay(100); + } + + _taskClient = _connection.GetClient(); + _hasConnection = true; + } + + private void CheckConnection() + { + if (!_hasConnection) + { + throw new InvalidOperationException("SetConnection"); + } + } + + //----------------------------------------------------------------- + // Feedback: WebConsole, TimelineRecords and Logs + //----------------------------------------------------------------- + + public Task AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken) + { + if (HostContext.RunMode == RunMode.Local) + { + return Task.FromResult(null); + } + + CheckConnection(); + return _taskClient.AppendLogContentAsync(scopeIdentifier, hubName, planId, logId, uploadStream, cancellationToken: cancellationToken); + } + + public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList lines, CancellationToken cancellationToken) + { + if (HostContext.RunMode == RunMode.Local) + { + return Task.CompletedTask; + } + + CheckConnection(); + return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken); + } + + public Task CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken) + { + if (HostContext.RunMode == RunMode.Local) + { + return Task.FromResult(null); + } + + CheckConnection(); + return _taskClient.CreateAttachmentAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, type, name, uploadStream, cancellationToken: cancellationToken); + } + + public Task CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken) + { + if (HostContext.RunMode == RunMode.Local) + { + return Task.FromResult(null); + } + + CheckConnection(); + return _taskClient.CreateLogAsync(scopeIdentifier, hubName, planId, log, cancellationToken: cancellationToken); + } + + public Task CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken) + { + if (HostContext.RunMode == RunMode.Local) + { + return Task.FromResult(null); + } + + CheckConnection(); + return _taskClient.CreateTimelineAsync(scopeIdentifier, hubName, planId, new Timeline(timelineId), cancellationToken: cancellationToken); + } + + public Task> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable records, CancellationToken cancellationToken) + { + if (HostContext.RunMode == RunMode.Local) + { + return Task.FromResult>(null); + } + + CheckConnection(); + return _taskClient.UpdateTimelineRecordsAsync(scopeIdentifier, hubName, planId, timelineId, records, cancellationToken: cancellationToken); + } + + public Task RaisePlanEventAsync(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent + { 
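+            // Illustrative usage (caller and values assumed, they are not part of this file): the worker
+            // reports a finished job by raising a completion event through this method, for example:
+            //
+            //     var completedEvent = new JobCompletedEvent(message.RequestId, message.JobId, TaskResult.Succeeded);
+            //     await jobServer.RaisePlanEventAsync(plan.ScopeIdentifier, plan.PlanType, plan.PlanId, completedEvent, CancellationToken.None);
+            //
+            // Like the other calls in this class, the event is skipped entirely when running in RunMode.Local.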
+ if (HostContext.RunMode == RunMode.Local) + { + return Task.CompletedTask; + } + + CheckConnection(); + return _taskClient.RaisePlanEventAsync(scopeIdentifier, hubName, planId, eventData, cancellationToken: cancellationToken); + } + + public Task GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken) + { + if (HostContext.RunMode == RunMode.Local) + { + return Task.FromResult(null); + } + + CheckConnection(); + return _taskClient.GetTimelineAsync(scopeIdentifier, hubName, planId, timelineId, includeRecords: true, cancellationToken: cancellationToken); + } + } +} diff --git a/src/Runner.Common/JobServerQueue.cs b/src/Runner.Common/JobServerQueue.cs new file mode 100644 index 00000000000..5ffd7eac2e6 --- /dev/null +++ b/src/Runner.Common/JobServerQueue.cs @@ -0,0 +1,702 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using System; +using System.Collections.Generic; +using System.Collections.Concurrent; +using System.IO; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Pipelines = GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common +{ + [ServiceLocator(Default = typeof(JobServerQueue))] + public interface IJobServerQueue : IRunnerService, IThrottlingReporter + { + event EventHandler JobServerQueueThrottling; + Task ShutdownAsync(); + void Start(Pipelines.AgentJobRequestMessage jobRequest); + void QueueWebConsoleLine(Guid stepRecordId, string line); + void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource); + void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord); + } + + public sealed class JobServerQueue : RunnerService, IJobServerQueue + { + // Default delay for Dequeue process + private static readonly TimeSpan _aggressiveDelayForWebConsoleLineDequeue = TimeSpan.FromMilliseconds(250); + private static readonly TimeSpan _delayForWebConsoleLineDequeue = TimeSpan.FromMilliseconds(500); + private static readonly TimeSpan _delayForTimelineUpdateDequeue = TimeSpan.FromMilliseconds(500); + private static readonly TimeSpan _delayForFileUploadDequeue = TimeSpan.FromMilliseconds(1000); + + // Job message information + private Guid _scopeIdentifier; + private string _hubName; + private Guid _planId; + private Guid _jobTimelineId; + private Guid _jobTimelineRecordId; + + // queue for web console line + private readonly ConcurrentQueue _webConsoleLineQueue = new ConcurrentQueue(); + + // queue for file upload (log file or attachment) + private readonly ConcurrentQueue _fileUploadQueue = new ConcurrentQueue(); + + // queue for timeline or timeline record update (one queue per timeline) + private readonly ConcurrentDictionary> _timelineUpdateQueue = new ConcurrentDictionary>(); + + // indicate how many timelines we have, we will process _timelineUpdateQueue base on the order of timeline in this list + private readonly List _allTimelines = new List(); + + // bufferd timeline records that fail to update + private readonly Dictionary> _bufferedRetryRecords = new Dictionary>(); + + // Task for each queue's dequeue process + private Task _webConsoleLineDequeueTask; + private Task _fileUploadDequeueTask; + private Task _timelineUpdateDequeueTask; + + // common + private IJobServer _jobServer; + private Task[] _allDequeueTasks; + private readonly TaskCompletionSource _jobCompletionSource = new TaskCompletionSource(); + private bool _queueInProcess = false; 
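+        // Typical flow through this class, for orientation (variable names assumed; the real callers are
+        // the worker's execution context and the paging logger):
+        //
+        //     jobServerQueue.Start(jobRequestMessage);                       // spin up the three dequeue loops
+        //     jobServerQueue.QueueWebConsoleLine(stepRecordId, line);        // live console feed, best effort
+        //     jobServerQueue.QueueTimelineRecordUpdate(timelineId, record);  // step state/result updates
+        //     jobServerQueue.QueueFileUpload(timelineId, recordId, "DistributedTask.Core.Log", name, path, deleteSource: true);
+        //     await jobServerQueue.ShutdownAsync();                          // at job end: stop the loops and drain all queues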
+        private ITerminal _term;
+
+        public event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
+
+        // The web console dequeue loop starts by processing the queue every 250ms for the first 60*4 iterations (~60 seconds).
+        // After that, the dequeue happens every 500ms.
+        // This way customers still get instant live console output at job start, while we cut the load on the
+        // server once the job has been running for more than 60 seconds.
+        private int _webConsoleLineAggressiveDequeueCount = 0;
+        private const int _webConsoleLineAggressiveDequeueLimit = 4 * 60;
+        private bool _webConsoleLineAggressiveDequeue = true;
+        private bool _firstConsoleOutputs = true;
+
+        public override void Initialize(IHostContext hostContext)
+        {
+            base.Initialize(hostContext);
+            _jobServer = hostContext.GetService<IJobServer>();
+        }
+
+        public void Start(Pipelines.AgentJobRequestMessage jobRequest)
+        {
+            Trace.Entering();
+            if (HostContext.RunMode == RunMode.Local)
+            {
+                _term = HostContext.GetService<ITerminal>();
+                return;
+            }
+
+            if (_queueInProcess)
+            {
+                Trace.Info("No-op, all queue process tasks are running.");
+                return;
+            }
+
+            ArgUtil.NotNull(jobRequest, nameof(jobRequest));
+            ArgUtil.NotNull(jobRequest.Plan, nameof(jobRequest.Plan));
+            ArgUtil.NotNull(jobRequest.Timeline, nameof(jobRequest.Timeline));
+
+            _scopeIdentifier = jobRequest.Plan.ScopeIdentifier;
+            _hubName = jobRequest.Plan.PlanType;
+            _planId = jobRequest.Plan.PlanId;
+            _jobTimelineId = jobRequest.Timeline.Id;
+            _jobTimelineRecordId = jobRequest.JobId;
+
+            // The server has already created the job timeline.
+            _timelineUpdateQueue[_jobTimelineId] = new ConcurrentQueue<TimelineRecord>();
+            _allTimelines.Add(_jobTimelineId);
+
+            // Start the three dequeue tasks.
+            Trace.Info("Start process web console line queue.");
+            _webConsoleLineDequeueTask = ProcessWebConsoleLinesQueueAsync();
+
+            Trace.Info("Start process file upload queue.");
+            _fileUploadDequeueTask = ProcessFilesUploadQueueAsync();
+
+            Trace.Info("Start process timeline update queue.");
+            _timelineUpdateDequeueTask = ProcessTimelinesUpdateQueueAsync();
+
+            _allDequeueTasks = new Task[] { _webConsoleLineDequeueTask, _fileUploadDequeueTask, _timelineUpdateDequeueTask };
+            _queueInProcess = true;
+        }
+
+        // The WebConsoleLine queue and FileUpload queue are always best effort.
+        // A TimelineUpdate queue error becomes critical when the timeline records contain output variables.
+        public async Task ShutdownAsync()
+        {
+            if (HostContext.RunMode == RunMode.Local)
+            {
+                return;
+            }
+
+            if (!_queueInProcess)
+            {
+                Trace.Info("No-op, all queue process tasks have been stopped.");
+            }
+
+            Trace.Info("Fire signal to shutdown all queues.");
+            _jobCompletionSource.TrySetResult(0);
+
+            await Task.WhenAll(_allDequeueTasks);
+            _queueInProcess = false;
+            Trace.Info("All queue process tasks stopped.");
+
+            // Drain the queues.
+            // ProcessWebConsoleLinesQueueAsync() will never throw an exception; live console update is always best effort.
+            Trace.Verbose("Draining web console line queue.");
+            await ProcessWebConsoleLinesQueueAsync(runOnce: true);
+            Trace.Info("Web console line queue drained.");
+
+            // ProcessFilesUploadQueueAsync() will never throw an exception; log file upload is always best effort.
+            Trace.Verbose("Draining file upload queue.");
+            await ProcessFilesUploadQueueAsync(runOnce: true);
+            Trace.Info("File upload queue drained.");
+
+            // ProcessTimelinesUpdateQueueAsync() will throw an exception during shutdown
+            // if any timeline records that failed to update contain output variables.
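+            // (Output variables are consumed by downstream jobs, so unlike console lines and log files they
+            // cannot be silently dropped; see the AggregateException thrown at the end of that method.)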
+ Trace.Verbose("Draining timeline update queue."); + await ProcessTimelinesUpdateQueueAsync(runOnce: true); + Trace.Info("Timeline update queue drained."); + + Trace.Info("All queue process tasks have been stopped, and all queues are drained."); + } + + public void QueueWebConsoleLine(Guid stepRecordId, string line) + { + Trace.Verbose("Enqueue web console line queue: {0}", line); + if (HostContext.RunMode == RunMode.Local) + { + if ((line ?? string.Empty).StartsWith("##[section]")) + { + Console.WriteLine("******************************************************************************"); + Console.WriteLine(line.Substring("##[section]".Length)); + Console.WriteLine("******************************************************************************"); + } + else + { + Console.WriteLine(line); + } + + return; + } + + _webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line)); + } + + public void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource) + { + if (HostContext.RunMode == RunMode.Local) + { + return; + } + + ArgUtil.NotEmpty(timelineId, nameof(timelineId)); + ArgUtil.NotEmpty(timelineRecordId, nameof(timelineRecordId)); + + // all parameter not null, file path exist. + var newFile = new UploadFileInfo() + { + TimelineId = timelineId, + TimelineRecordId = timelineRecordId, + Type = type, + Name = name, + Path = path, + DeleteSource = deleteSource + }; + + Trace.Verbose("Enqueue file upload queue: file '{0}' attach to record {1}", newFile.Path, timelineRecordId); + _fileUploadQueue.Enqueue(newFile); + } + + public void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord) + { + if (HostContext.RunMode == RunMode.Local) + { + return; + } + + ArgUtil.NotEmpty(timelineId, nameof(timelineId)); + ArgUtil.NotNull(timelineRecord, nameof(timelineRecord)); + ArgUtil.NotEmpty(timelineRecord.Id, nameof(timelineRecord.Id)); + + _timelineUpdateQueue.TryAdd(timelineId, new ConcurrentQueue()); + + Trace.Verbose("Enqueue timeline {0} update queue: {1}", timelineId, timelineRecord.Id); + _timelineUpdateQueue[timelineId].Enqueue(timelineRecord.Clone()); + } + + public void ReportThrottling(TimeSpan delay, DateTime expiration) + { + Trace.Info($"Receive server throttling report, expect delay {delay} milliseconds till {expiration}"); + var throttlingEvent = JobServerQueueThrottling; + if (throttlingEvent != null) + { + throttlingEvent(this, new ThrottlingEventArgs(delay, expiration)); + } + } + + private async Task ProcessWebConsoleLinesQueueAsync(bool runOnce = false) + { + while (!_jobCompletionSource.Task.IsCompleted || runOnce) + { + if (_webConsoleLineAggressiveDequeue && ++_webConsoleLineAggressiveDequeueCount > _webConsoleLineAggressiveDequeueLimit) + { + Trace.Info("Stop aggressive process web console line queue."); + _webConsoleLineAggressiveDequeue = false; + } + + // Group consolelines by timeline record of each step + Dictionary> stepsConsoleLines = new Dictionary>(); + List stepRecordIds = new List(); // We need to keep lines in order + int linesCounter = 0; + ConsoleLineInfo lineInfo; + while (_webConsoleLineQueue.TryDequeue(out lineInfo)) + { + if (!stepsConsoleLines.ContainsKey(lineInfo.StepRecordId)) + { + stepsConsoleLines[lineInfo.StepRecordId] = new List(); + stepRecordIds.Add(lineInfo.StepRecordId); + } + + if (!string.IsNullOrEmpty(lineInfo.Line) && lineInfo.Line.Length > 1024) + { + Trace.Verbose("Web console line is more than 1024 chars, truncate to first 1024 chars"); + lineInfo.Line = 
$"{lineInfo.Line.Substring(0, 1024)}..."; + } + + stepsConsoleLines[lineInfo.StepRecordId].Add(lineInfo.Line); + linesCounter++; + + // process at most about 500 lines of web console line during regular timer dequeue task. + if (!runOnce && linesCounter > 500) + { + break; + } + } + + // Batch post consolelines for each step timeline record + foreach (var stepRecordId in stepRecordIds) + { + // Split consolelines into batch, each batch will container at most 100 lines. + int batchCounter = 0; + List> batchedLines = new List>(); + foreach (var line in stepsConsoleLines[stepRecordId]) + { + var currentBatch = batchedLines.ElementAtOrDefault(batchCounter); + if (currentBatch == null) + { + batchedLines.Add(new List()); + currentBatch = batchedLines.ElementAt(batchCounter); + } + + currentBatch.Add(line); + + if (currentBatch.Count >= 100) + { + batchCounter++; + } + } + + if (batchedLines.Count > 0) + { + // When job finish, web console lines becomes less interesting to customer + // We batch and produce 500 lines of web console output every 500ms + // If customer's task produce massive of outputs, then the last queue drain run might take forever. + // So we will only upload the last 200 lines of each step from all buffered web console lines. + if (runOnce && batchedLines.Count > 2) + { + Trace.Info($"Skip {batchedLines.Count - 2} batches web console lines for last run"); + batchedLines = batchedLines.TakeLast(2).ToList(); + batchedLines[0].Insert(0, "..."); + } + + int errorCount = 0; + foreach (var batch in batchedLines) + { + try + { + // we will not requeue failed batch, since the web console lines are time sensitive. + await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch, default(CancellationToken)); + if (_firstConsoleOutputs) + { + HostContext.WritePerfCounter($"WorkerJobServerQueueAppendFirstConsoleOutput_{_planId.ToString()}"); + _firstConsoleOutputs = false; + } + } + catch (Exception ex) + { + Trace.Info("Catch exception during append web console line, keep going since the process is best effort."); + Trace.Error(ex); + errorCount++; + } + } + + Trace.Info("Try to append {0} batches web console lines for record '{2}', success rate: {1}/{0}.", batchedLines.Count, batchedLines.Count - errorCount, stepRecordId); + } + } + + if (runOnce) + { + break; + } + else + { + await Task.Delay(_webConsoleLineAggressiveDequeue ? _aggressiveDelayForWebConsoleLineDequeue : _delayForWebConsoleLineDequeue); + } + } + } + + private async Task ProcessFilesUploadQueueAsync(bool runOnce = false) + { + while (!_jobCompletionSource.Task.IsCompleted || runOnce) + { + List filesToUpload = new List(); + UploadFileInfo dequeueFile; + while (_fileUploadQueue.TryDequeue(out dequeueFile)) + { + filesToUpload.Add(dequeueFile); + // process at most 10 file upload. + if (!runOnce && filesToUpload.Count > 10) + { + break; + } + } + + if (filesToUpload.Count > 0) + { + if (runOnce) + { + Trace.Info($"Uploading {filesToUpload.Count} files in one shot."); + } + + // TODO: upload all file in parallel + int errorCount = 0; + foreach (var file in filesToUpload) + { + try + { + await UploadFile(file); + } + catch (Exception ex) + { + Trace.Info("Catch exception during log or attachment file upload, keep going since the process is best effort."); + Trace.Error(ex); + errorCount++; + + // put the failed upload file back to queue. + // TODO: figure out how should we retry paging log upload. 
+ //lock (_fileUploadQueueLock) + //{ + // _fileUploadQueue.Enqueue(file); + //} + } + } + + Trace.Info("Try to upload {0} log files or attachments, success rate: {1}/{0}.", filesToUpload.Count, filesToUpload.Count - errorCount); + } + + if (runOnce) + { + break; + } + else + { + await Task.Delay(_delayForFileUploadDequeue); + } + } + } + + private async Task ProcessTimelinesUpdateQueueAsync(bool runOnce = false) + { + while (!_jobCompletionSource.Task.IsCompleted || runOnce) + { + List pendingUpdates = new List(); + foreach (var timeline in _allTimelines) + { + ConcurrentQueue recordQueue; + if (_timelineUpdateQueue.TryGetValue(timeline, out recordQueue)) + { + List records = new List(); + TimelineRecord record; + while (recordQueue.TryDequeue(out record)) + { + records.Add(record); + // process at most 25 timeline records update for each timeline. + if (!runOnce && records.Count > 25) + { + break; + } + } + + if (records.Count > 0) + { + pendingUpdates.Add(new PendingTimelineRecord() { TimelineId = timeline, PendingRecords = records.ToList() }); + } + } + } + + // we need track whether we have new sub-timeline been created on the last run. + // if so, we need continue update timeline record even we on the last run. + bool pendingSubtimelineUpdate = false; + List mainTimelineRecordsUpdateErrors = new List(); + if (pendingUpdates.Count > 0) + { + foreach (var update in pendingUpdates) + { + List bufferedRecords; + if (_bufferedRetryRecords.TryGetValue(update.TimelineId, out bufferedRecords)) + { + update.PendingRecords.InsertRange(0, bufferedRecords); + } + + update.PendingRecords = MergeTimelineRecords(update.PendingRecords); + + foreach (var detailTimeline in update.PendingRecords.Where(r => r.Details != null)) + { + if (!_allTimelines.Contains(detailTimeline.Details.Id)) + { + try + { + Timeline newTimeline = await _jobServer.CreateTimelineAsync(_scopeIdentifier, _hubName, _planId, detailTimeline.Details.Id, default(CancellationToken)); + _allTimelines.Add(newTimeline.Id); + pendingSubtimelineUpdate = true; + } + catch (TimelineExistsException) + { + Trace.Info("Catch TimelineExistsException during timeline creation. Ignore the error since server already had this timeline."); + _allTimelines.Add(detailTimeline.Details.Id); + } + catch (Exception ex) + { + Trace.Error(ex); + } + } + } + + try + { + await _jobServer.UpdateTimelineRecordsAsync(_scopeIdentifier, _hubName, _planId, update.TimelineId, update.PendingRecords, default(CancellationToken)); + if (_bufferedRetryRecords.Remove(update.TimelineId)) + { + Trace.Verbose("Cleanup buffered timeline record for timeline: {0}.", update.TimelineId); + } + } + catch (Exception ex) + { + Trace.Info("Catch exception during update timeline records, try to update these timeline records next time."); + Trace.Error(ex); + _bufferedRetryRecords[update.TimelineId] = update.PendingRecords.ToList(); + if (update.TimelineId == _jobTimelineId) + { + mainTimelineRecordsUpdateErrors.Add(ex); + } + } + } + } + + if (runOnce) + { + // continue process timeline records update, + // we might have more records need update, + // since we just create a new sub-timeline + if (pendingSubtimelineUpdate) + { + continue; + } + else + { + if (mainTimelineRecordsUpdateErrors.Count > 0 && + _bufferedRetryRecords.ContainsKey(_jobTimelineId) && + _bufferedRetryRecords[_jobTimelineId] != null && + _bufferedRetryRecords[_jobTimelineId].Any(r => r.Variables.Count > 0)) + { + Trace.Info("Fail to update timeline records with output variables. 
Throw exception to fail the job since output variables are critical to downstream jobs."); + throw new AggregateException("Failed to publish output variables.", mainTimelineRecordsUpdateErrors); + } + else + { + break; + } + } + } + else + { + await Task.Delay(_delayForTimelineUpdateDequeue); + } + } + } + + private List MergeTimelineRecords(List timelineRecords) + { + if (timelineRecords == null || timelineRecords.Count <= 1) + { + return timelineRecords; + } + + Dictionary dict = new Dictionary(); + foreach (TimelineRecord rec in timelineRecords) + { + if (rec == null) + { + continue; + } + + TimelineRecord timelineRecord; + if (dict.TryGetValue(rec.Id, out timelineRecord)) + { + // Merge rec into timelineRecord + timelineRecord.CurrentOperation = rec.CurrentOperation ?? timelineRecord.CurrentOperation; + timelineRecord.Details = rec.Details ?? timelineRecord.Details; + timelineRecord.FinishTime = rec.FinishTime ?? timelineRecord.FinishTime; + timelineRecord.Log = rec.Log ?? timelineRecord.Log; + timelineRecord.Name = rec.Name ?? timelineRecord.Name; + timelineRecord.RefName = rec.RefName ?? timelineRecord.RefName; + timelineRecord.PercentComplete = rec.PercentComplete ?? timelineRecord.PercentComplete; + timelineRecord.RecordType = rec.RecordType ?? timelineRecord.RecordType; + timelineRecord.Result = rec.Result ?? timelineRecord.Result; + timelineRecord.ResultCode = rec.ResultCode ?? timelineRecord.ResultCode; + timelineRecord.StartTime = rec.StartTime ?? timelineRecord.StartTime; + timelineRecord.State = rec.State ?? timelineRecord.State; + timelineRecord.WorkerName = rec.WorkerName ?? timelineRecord.WorkerName; + + if (rec.ErrorCount != null && rec.ErrorCount > 0) + { + timelineRecord.ErrorCount = rec.ErrorCount; + } + + if (rec.WarningCount != null && rec.WarningCount > 0) + { + timelineRecord.WarningCount = rec.WarningCount; + } + + if (rec.Issues.Count > 0) + { + timelineRecord.Issues.Clear(); + timelineRecord.Issues.AddRange(rec.Issues.Select(i => i.Clone())); + } + + if (rec.Variables.Count > 0) + { + foreach (var variable in rec.Variables) + { + timelineRecord.Variables[variable.Key] = variable.Value.Clone(); + } + } + } + else + { + dict.Add(rec.Id, rec); + } + } + + var mergedRecords = dict.Values.ToList(); + + Trace.Verbose("Merged Timeline records"); + foreach (var record in mergedRecords) + { + Trace.Verbose($" Record: t={record.RecordType}, n={record.Name}, s={record.State}, st={record.StartTime}, {record.PercentComplete}%, ft={record.FinishTime}, r={record.Result}: {record.CurrentOperation}"); + if (record.Issues != null && record.Issues.Count > 0) + { + foreach (var issue in record.Issues) + { + String source; + issue.Data.TryGetValue("sourcepath", out source); + Trace.Verbose($" Issue: c={issue.Category}, t={issue.Type}, s={source ?? 
string.Empty}, m={issue.Message}"); + } + } + + if (record.Variables != null && record.Variables.Count > 0) + { + foreach (var variable in record.Variables) + { + Trace.Verbose($" Variable: n={variable.Key}, secret={variable.Value.IsSecret}"); + } + } + } + + return mergedRecords; + } + + private async Task UploadFile(UploadFileInfo file) + { + bool uploadSucceed = false; + try + { + if (String.Equals(file.Type, CoreAttachmentType.Log, StringComparison.OrdinalIgnoreCase)) + { + // Create the log + var taskLog = await _jobServer.CreateLogAsync(_scopeIdentifier, _hubName, _planId, new TaskLog(String.Format(@"logs\{0:D}", file.TimelineRecordId)), default(CancellationToken)); + + // Upload the contents + using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)) + { + var logUploaded = await _jobServer.AppendLogContentAsync(_scopeIdentifier, _hubName, _planId, taskLog.Id, fs, default(CancellationToken)); + } + + // Create a new record and only set the Log field + var attachmentUpdataRecord = new TimelineRecord() { Id = file.TimelineRecordId, Log = taskLog }; + QueueTimelineRecordUpdate(file.TimelineId, attachmentUpdataRecord); + } + else + { + // Create attachment + using (FileStream fs = File.Open(file.Path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)) + { + var result = await _jobServer.CreateAttachmentAsync(_scopeIdentifier, _hubName, _planId, file.TimelineId, file.TimelineRecordId, file.Type, file.Name, fs, default(CancellationToken)); + } + } + + uploadSucceed = true; + } + finally + { + if (uploadSucceed && file.DeleteSource) + { + try + { + File.Delete(file.Path); + } + catch (Exception ex) + { + Trace.Info("Catch exception during delete success uploaded file."); + Trace.Error(ex); + } + } + } + } + } + + internal class PendingTimelineRecord + { + public Guid TimelineId { get; set; } + public List PendingRecords { get; set; } + } + + internal class UploadFileInfo + { + public Guid TimelineId { get; set; } + public Guid TimelineRecordId { get; set; } + public string Type { get; set; } + public string Name { get; set; } + public string Path { get; set; } + public bool DeleteSource { get; set; } + } + + + internal class ConsoleLineInfo + { + public ConsoleLineInfo(Guid recordId, string line) + { + this.StepRecordId = recordId; + this.Line = line; + } + + public Guid StepRecordId { get; set; } + public string Line { get; set; } + } +} diff --git a/src/Runner.Common/LocationServer.cs b/src/Runner.Common/LocationServer.cs new file mode 100644 index 00000000000..25e09228332 --- /dev/null +++ b/src/Runner.Common/LocationServer.cs @@ -0,0 +1,61 @@ +using System; +using System.Threading.Tasks; +using GitHub.Services.WebApi; +using GitHub.Services.Location.Client; +using GitHub.Services.Location; + +namespace GitHub.Runner.Common +{ + [ServiceLocator(Default = typeof(LocationServer))] + public interface ILocationServer : IRunnerService + { + Task ConnectAsync(VssConnection jobConnection); + + Task GetConnectionDataAsync(); + } + + public sealed class LocationServer : RunnerService, ILocationServer + { + private bool _hasConnection; + private VssConnection _connection; + private LocationHttpClient _locationClient; + + public async Task ConnectAsync(VssConnection jobConnection) + { + _connection = jobConnection; + int attemptCount = 5; + while (!_connection.HasAuthenticated && attemptCount-- > 0) + { + try + { + await _connection.ConnectAsync(); + break; + } + catch (Exception ex) when (attemptCount > 0) + { + Trace.Info($"Catch exception during 
connect. {attemptCount} attempt left."); + Trace.Error(ex); + } + + await Task.Delay(100); + } + + _locationClient = _connection.GetClient(); + _hasConnection = true; + } + + private void CheckConnection() + { + if (!_hasConnection) + { + throw new InvalidOperationException("SetConnection"); + } + } + + public async Task GetConnectionDataAsync() + { + CheckConnection(); + return await _locationClient.GetConnectionDataAsync(ConnectOptions.None, 0); + } + } +} diff --git a/src/Runner.Common/Logging.cs b/src/Runner.Common/Logging.cs new file mode 100644 index 00000000000..26a25d64482 --- /dev/null +++ b/src/Runner.Common/Logging.cs @@ -0,0 +1,124 @@ +using GitHub.Runner.Common.Util; +using System; +using System.IO; + +namespace GitHub.Runner.Common +{ + [ServiceLocator(Default = typeof(PagingLogger))] + public interface IPagingLogger : IRunnerService + { + long TotalLines { get; } + void Setup(Guid timelineId, Guid timelineRecordId); + + void Write(string message); + + void End(); + } + + public class PagingLogger : RunnerService, IPagingLogger + { + public static string PagingFolder = "pages"; + + // 8 MB + public const int PageSize = 8 * 1024 * 1024; + + private Guid _timelineId; + private Guid _timelineRecordId; + private string _pageId; + private FileStream _pageData; + private StreamWriter _pageWriter; + private int _byteCount; + private int _pageCount; + private long _totalLines; + private string _dataFileName; + private string _pagesFolder; + private IJobServerQueue _jobServerQueue; + + public long TotalLines => _totalLines; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + _totalLines = 0; + _pageId = Guid.NewGuid().ToString(); + _pagesFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), PagingFolder); + _jobServerQueue = HostContext.GetService(); + Directory.CreateDirectory(_pagesFolder); + } + + public void Setup(Guid timelineId, Guid timelineRecordId) + { + _timelineId = timelineId; + _timelineRecordId = timelineRecordId; + } + + // + // Write a metadata file with id etc, point to pages on disk. + // Each page is a guid_#. As a page rolls over, it events it's done + // and the consumer queues it for upload + // Ensure this is lazy. Create a page on first write + // + public void Write(string message) + { + // lazy creation on write + if (_pageWriter == null) + { + Create(); + } + + string line = $"{DateTime.UtcNow.ToString("O")} {message}"; + _pageWriter.WriteLine(line); + + _totalLines++; + if (line.IndexOf('\n') != -1) + { + foreach (char c in line) + { + if (c == '\n') + { + _totalLines++; + } + } + } + + _byteCount += System.Text.Encoding.UTF8.GetByteCount(line); + if (_byteCount >= PageSize) + { + NewPage(); + } + } + + public void End() + { + EndPage(); + } + + private void Create() + { + NewPage(); + } + + private void NewPage() + { + EndPage(); + _byteCount = 0; + _dataFileName = Path.Combine(_pagesFolder, $"{_pageId}_{++_pageCount}.log"); + _pageData = new FileStream(_dataFileName, FileMode.CreateNew); + _pageWriter = new StreamWriter(_pageData, System.Text.Encoding.UTF8); + } + + private void EndPage() + { + if (_pageWriter != null) + { + _pageWriter.Flush(); + _pageData.Flush(); + //The StreamWriter object calls Dispose() on the provided Stream object when StreamWriter.Dispose is called. 
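+                // Once disposed, the completed page is queued below as a "DistributedTask.Core.Log" attachment
+                // on this timeline record and, because deleteSource is true, deleted after a successful upload,
+                // so uploaded pages do not accumulate in the diagnostic pages folder.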
+ _pageWriter.Dispose(); + _pageWriter = null; + _pageData = null; + _jobServerQueue.QueueFileUpload(_timelineId, _timelineRecordId, "DistributedTask.Core.Log", "CustomToolLog", _dataFileName, true); + } + } + } +} diff --git a/src/Runner.Common/ProcessChannel.cs b/src/Runner.Common/ProcessChannel.cs new file mode 100644 index 00000000000..14d367e1edf --- /dev/null +++ b/src/Runner.Common/ProcessChannel.cs @@ -0,0 +1,100 @@ +using System; +using System.IO; +using System.IO.Pipes; +using System.Threading; +using System.Threading.Tasks; + +namespace GitHub.Runner.Common +{ + public delegate void StartProcessDelegate(string pipeHandleOut, string pipeHandleIn); + + public enum MessageType + { + NotInitialized = -1, + NewJobRequest = 1, + CancelRequest = 2, + RunnerShutdown = 3, + OperatingSystemShutdown = 4 + } + + public struct WorkerMessage + { + public MessageType MessageType; + public string Body; + public WorkerMessage(MessageType messageType, string body) + { + MessageType = messageType; + Body = body; + } + } + + [ServiceLocator(Default = typeof(ProcessChannel))] + public interface IProcessChannel : IDisposable, IRunnerService + { + void StartServer(StartProcessDelegate startProcess); + void StartClient(string pipeNameInput, string pipeNameOutput); + + Task SendAsync(MessageType messageType, string body, CancellationToken cancellationToken); + Task ReceiveAsync(CancellationToken cancellationToken); + } + + public sealed class ProcessChannel : RunnerService, IProcessChannel + { + private AnonymousPipeServerStream _inServer; + private AnonymousPipeServerStream _outServer; + private AnonymousPipeClientStream _inClient; + private AnonymousPipeClientStream _outClient; + private StreamString _writeStream; + private StreamString _readStream; + + public void StartServer(StartProcessDelegate startProcess) + { + _outServer = new AnonymousPipeServerStream(PipeDirection.Out, HandleInheritability.Inheritable); + _inServer = new AnonymousPipeServerStream(PipeDirection.In, HandleInheritability.Inheritable); + _readStream = new StreamString(_inServer); + _writeStream = new StreamString(_outServer); + startProcess(_outServer.GetClientHandleAsString(), _inServer.GetClientHandleAsString()); + _outServer.DisposeLocalCopyOfClientHandle(); + _inServer.DisposeLocalCopyOfClientHandle(); + } + + public void StartClient(string pipeNameInput, string pipeNameOutput) + { + _inClient = new AnonymousPipeClientStream(PipeDirection.In, pipeNameInput); + _outClient = new AnonymousPipeClientStream(PipeDirection.Out, pipeNameOutput); + _readStream = new StreamString(_inClient); + _writeStream = new StreamString(_outClient); + } + + public async Task SendAsync(MessageType messageType, string body, CancellationToken cancellationToken) + { + await _writeStream.WriteInt32Async((int)messageType, cancellationToken); + await _writeStream.WriteStringAsync(body, cancellationToken); + } + + public async Task ReceiveAsync(CancellationToken cancellationToken) + { + WorkerMessage result = new WorkerMessage(MessageType.NotInitialized, string.Empty); + result.MessageType = (MessageType)await _readStream.ReadInt32Async(cancellationToken); + result.Body = await _readStream.ReadStringAsync(cancellationToken); + return result; + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + _inServer?.Dispose(); + _outServer?.Dispose(); + _inClient?.Dispose(); + _outClient?.Dispose(); + } + } + } +} diff --git a/src/Runner.Common/ProcessExtensions.cs 
b/src/Runner.Common/ProcessExtensions.cs new file mode 100644 index 00000000000..5e3bbd35ba1 --- /dev/null +++ b/src/Runner.Common/ProcessExtensions.cs @@ -0,0 +1,396 @@ +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace GitHub.Runner.Common +{ +#if OS_WINDOWS + public static class WindowsProcessExtensions + { + // Reference: https://blogs.msdn.microsoft.com/matt_pietrek/2004/08/25/reading-another-processs-environment/ + // Reference: http://blog.gapotchenko.com/eazfuscator.net/reading-environment-variables + public static string GetEnvironmentVariable(this Process process, IHostContext hostContext, string variable) + { + var trace = hostContext.GetTrace(nameof(WindowsProcessExtensions)); + Dictionary environmentVariables = new Dictionary(StringComparer.OrdinalIgnoreCase); + IntPtr processHandle = process.SafeHandle.DangerousGetHandle(); + + IntPtr environmentBlockAddress; + if (Environment.Is64BitOperatingSystem) + { + PROCESS_BASIC_INFORMATION64 pbi = new PROCESS_BASIC_INFORMATION64(); + int returnLength = 0; + int status = NtQueryInformationProcess64(processHandle, PROCESSINFOCLASS.ProcessBasicInformation, ref pbi, Marshal.SizeOf(pbi), ref returnLength); + if (status != 0) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + bool wow64; + if (!IsWow64Process(processHandle, out wow64)) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + if (!wow64) + { + // 64 bits process on 64 bits OS + IntPtr UserProcessParameterAddress = ReadIntPtr64(processHandle, new IntPtr(pbi.PebBaseAddress) + 0x20); + environmentBlockAddress = ReadIntPtr64(processHandle, UserProcessParameterAddress + 0x80); + } + else + { + // 32 bits process on 64 bits OS + IntPtr UserProcessParameterAddress = ReadIntPtr32(processHandle, new IntPtr(pbi.PebBaseAddress) + 0x1010); + environmentBlockAddress = ReadIntPtr32(processHandle, UserProcessParameterAddress + 0x48); + } + } + else + { + PROCESS_BASIC_INFORMATION32 pbi = new PROCESS_BASIC_INFORMATION32(); + int returnLength = 0; + int status = NtQueryInformationProcess32(processHandle, PROCESSINFOCLASS.ProcessBasicInformation, ref pbi, Marshal.SizeOf(pbi), ref returnLength); + if (status != 0) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + // 32 bits process on 32 bits OS + IntPtr UserProcessParameterAddress = ReadIntPtr32(processHandle, new IntPtr(pbi.PebBaseAddress) + 0x10); + environmentBlockAddress = ReadIntPtr32(processHandle, UserProcessParameterAddress + 0x48); + } + + MEMORY_BASIC_INFORMATION memInfo = new MEMORY_BASIC_INFORMATION(); + if (VirtualQueryEx(processHandle, environmentBlockAddress, ref memInfo, Marshal.SizeOf(memInfo)) == 0) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + Int64 dataSize = memInfo.RegionSize.ToInt64() - (environmentBlockAddress.ToInt64() - memInfo.BaseAddress.ToInt64()); + + byte[] envData = new byte[dataSize]; + IntPtr res_len = IntPtr.Zero; + if (!ReadProcessMemory(processHandle, environmentBlockAddress, envData, new IntPtr(dataSize), ref res_len)) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + if (res_len.ToInt64() != dataSize) + { + throw new ArgumentOutOfRangeException(nameof(ReadProcessMemory)); + } + + string 
environmentVariableString; + Int64 environmentVariableBytesLength = 0; + // check env encoding + if (envData[0] != 0 && envData[1] == 0) + { + // Unicode + for (Int64 index = 0; index < dataSize; index++) + { + // Unicode encoded environment variables block ends up with '\0\0\0\0'. + if (environmentVariableBytesLength == 0 && + envData[index] == 0 && + index + 3 < dataSize && + envData[index + 1] == 0 && + envData[index + 2] == 0 && + envData[index + 3] == 0) + { + environmentVariableBytesLength = index + 3; + } + else if (environmentVariableBytesLength != 0) + { + // set it '\0' so we can easily trim it, most array method doesn't take int64 + envData[index] = 0; + } + } + + if (environmentVariableBytesLength == 0) + { + throw new ArgumentException(nameof(environmentVariableBytesLength)); + } + + environmentVariableString = Encoding.Unicode.GetString(envData); + } + else if (envData[0] != 0 && envData[1] != 0) + { + // ANSI + for (Int64 index = 0; index < dataSize; index++) + { + // Unicode encoded environment variables block ends up with '\0\0'. + if (environmentVariableBytesLength == 0 && + envData[index] == 0 && + index + 1 < dataSize && + envData[index + 1] == 0) + { + environmentVariableBytesLength = index + 1; + } + else if (environmentVariableBytesLength != 0) + { + // set it '\0' so we can easily trim it, most array method doesn't take int64 + envData[index] = 0; + } + } + + if (environmentVariableBytesLength == 0) + { + throw new ArgumentException(nameof(environmentVariableBytesLength)); + } + + environmentVariableString = Encoding.Default.GetString(envData); + } + else + { + throw new ArgumentException(nameof(envData)); + } + + foreach (var envString in environmentVariableString.Split("\0", StringSplitOptions.RemoveEmptyEntries)) + { + string[] env = envString.Split("=", 2); + if (!string.IsNullOrEmpty(env[0])) + { + environmentVariables[env[0]] = env[1]; + trace.Verbose($"PID:{process.Id} ({env[0]}={env[1]})"); + } + } + + if (environmentVariables.TryGetValue(variable, out string envVariable)) + { + return envVariable; + } + else + { + return null; + } + } + + private static IntPtr ReadIntPtr32(IntPtr hProcess, IntPtr ptr) + { + IntPtr readPtr = IntPtr.Zero; + IntPtr data = Marshal.AllocHGlobal(sizeof(Int32)); + try + { + IntPtr res_len = IntPtr.Zero; + if (!ReadProcessMemory(hProcess, ptr, data, new IntPtr(sizeof(Int32)), ref res_len)) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + if (res_len.ToInt32() != sizeof(Int32)) + { + throw new ArgumentOutOfRangeException(nameof(ReadProcessMemory)); + } + + readPtr = new IntPtr(Marshal.ReadInt32(data)); + } + finally + { + Marshal.FreeHGlobal(data); + } + + return readPtr; + } + + private static IntPtr ReadIntPtr64(IntPtr hProcess, IntPtr ptr) + { + IntPtr readPtr = IntPtr.Zero; + IntPtr data = Marshal.AllocHGlobal(IntPtr.Size); + try + { + IntPtr res_len = IntPtr.Zero; + if (!ReadProcessMemory(hProcess, ptr, data, new IntPtr(sizeof(Int64)), ref res_len)) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + if (res_len.ToInt32() != IntPtr.Size) + { + throw new ArgumentOutOfRangeException(nameof(ReadProcessMemory)); + } + + readPtr = Marshal.ReadIntPtr(data); + } + finally + { + Marshal.FreeHGlobal(data); + } + + return readPtr; + } + + private enum PROCESSINFOCLASS : int + { + ProcessBasicInformation = 0 + }; + + [StructLayout(LayoutKind.Sequential)] + private struct MEMORY_BASIC_INFORMATION + { + public IntPtr BaseAddress; + public IntPtr AllocationBase; + public int AllocationProtect; + public 
IntPtr RegionSize; + public int State; + public int Protect; + public int Type; + } + + [StructLayout(LayoutKind.Sequential)] + private struct PROCESS_BASIC_INFORMATION64 + { + public long ExitStatus; + public long PebBaseAddress; + public long AffinityMask; + public long BasePriority; + public long UniqueProcessId; + public long InheritedFromUniqueProcessId; + }; + + [StructLayout(LayoutKind.Sequential)] + private struct PROCESS_BASIC_INFORMATION32 + { + public int ExitStatus; + public int PebBaseAddress; + public int AffinityMask; + public int BasePriority; + public int UniqueProcessId; + public int InheritedFromUniqueProcessId; + }; + + [DllImport("ntdll.dll", SetLastError = true, EntryPoint = "NtQueryInformationProcess")] + private static extern int NtQueryInformationProcess64(IntPtr processHandle, PROCESSINFOCLASS processInformationClass, ref PROCESS_BASIC_INFORMATION64 processInformation, int processInformationLength, ref int returnLength); + + [DllImport("ntdll.dll", SetLastError = true, EntryPoint = "NtQueryInformationProcess")] + private static extern int NtQueryInformationProcess32(IntPtr processHandle, PROCESSINFOCLASS processInformationClass, ref PROCESS_BASIC_INFORMATION32 processInformation, int processInformationLength, ref int returnLength); + + [DllImport("kernel32.dll", SetLastError = true)] + private static extern bool IsWow64Process(IntPtr processHandle, out bool wow64Process); + + [DllImport("kernel32.dll", SetLastError = true)] + private static extern bool ReadProcessMemory(IntPtr hProcess, IntPtr lpBaseAddress, IntPtr lpBuffer, IntPtr dwSize, ref IntPtr lpNumberOfBytesRead); + + [DllImport("kernel32.dll", SetLastError = true)] + private static extern bool ReadProcessMemory(IntPtr hProcess, IntPtr lpBaseAddress, [Out] byte[] lpBuffer, IntPtr dwSize, ref IntPtr lpNumberOfBytesRead); + + [DllImport("kernel32.dll")] + private static extern int VirtualQueryEx(IntPtr processHandle, IntPtr baseAddress, ref MEMORY_BASIC_INFORMATION memoryInformation, int memoryInformationLength); + } +#else + public static class LinuxProcessExtensions + { + public static string GetEnvironmentVariable(this Process process, IHostContext hostContext, string variable) + { + var trace = hostContext.GetTrace(nameof(LinuxProcessExtensions)); + Dictionary env = new Dictionary(); + + if (Directory.Exists("/proc")) + { + string envFile = $"/proc/{process.Id}/environ"; + trace.Info($"Read env from {envFile}"); + string envContent = File.ReadAllText(envFile); + if (!string.IsNullOrEmpty(envContent)) + { + // on linux, environment variables are seprated by '\0' + var envList = envContent.Split('\0', StringSplitOptions.RemoveEmptyEntries); + foreach (var envStr in envList) + { + // split on the first '=' + var keyValuePair = envStr.Split('=', 2); + if (keyValuePair.Length == 2) + { + env[keyValuePair[0]] = keyValuePair[1]; + trace.Verbose($"PID:{process.Id} ({keyValuePair[0]}={keyValuePair[1]})"); + } + } + } + } + else + { + // On OSX, there is no /proc folder for us to read environment for given process, + // So we have call `ps e -p -o command` to print out env to STDOUT, + // However, the output env are not format in a parseable way, it's just a string that concatenate all envs with space, + // It doesn't escape '=' or ' ', so we can't parse the output into a dictionary of all envs. + // So we only look for the env you request, in the format of variable=value. 
(it won't work if you variable contains = or space) + trace.Info($"Read env from output of `ps e -p {process.Id} -o command`"); + List psOut = new List(); + object outputLock = new object(); + using (var p = hostContext.CreateService()) + { + p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout) + { + if (!string.IsNullOrEmpty(stdout.Data)) + { + lock (outputLock) + { + psOut.Add(stdout.Data); + } + } + }; + + p.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr) + { + if (!string.IsNullOrEmpty(stderr.Data)) + { + lock (outputLock) + { + trace.Error(stderr.Data); + } + } + }; + + int exitCode = p.ExecuteAsync(workingDirectory: hostContext.GetDirectory(WellKnownDirectory.Root), + fileName: "ps", + arguments: $"e -p {process.Id} -o command", + environment: null, + cancellationToken: CancellationToken.None).GetAwaiter().GetResult(); + if (exitCode == 0) + { + trace.Info($"Successfully dump environment variables for {process.Id}"); + if (psOut.Count > 0) + { + string psOutputString = string.Join(" ", psOut); + trace.Verbose($"ps output: '{psOutputString}'"); + + int varStartIndex = psOutputString.IndexOf(variable, StringComparison.Ordinal); + if (varStartIndex >= 0) + { + string rightPart = psOutputString.Substring(varStartIndex + variable.Length + 1); + if (rightPart.IndexOf(' ') > 0) + { + string value = rightPart.Substring(0, rightPart.IndexOf(' ')); + env[variable] = value; + } + else + { + env[variable] = rightPart; + } + + trace.Verbose($"PID:{process.Id} ({variable}={env[variable]})"); + } + } + } + } + } + + if (env.TryGetValue(variable, out string envVariable)) + { + return envVariable; + } + else + { + return null; + } + } + } +#endif +} diff --git a/src/Runner.Common/ProcessInvoker.cs b/src/Runner.Common/ProcessInvoker.cs new file mode 100644 index 00000000000..021c6db5e02 --- /dev/null +++ b/src/Runner.Common/ProcessInvoker.cs @@ -0,0 +1,329 @@ +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading; +using System.Threading.Channels; +using System.Threading.Tasks; + +namespace GitHub.Runner.Common +{ + [ServiceLocator(Default = typeof(ProcessInvokerWrapper))] + public interface IProcessInvoker : IDisposable, IRunnerService + { + event EventHandler OutputDataReceived; + event EventHandler ErrorDataReceived; + + Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + CancellationToken cancellationToken); + + Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + CancellationToken cancellationToken); + + Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + CancellationToken cancellationToken); + + Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + CancellationToken cancellationToken); + + Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + Channel redirectStandardIn, + CancellationToken cancellationToken); + + Task ExecuteAsync( + string workingDirectory, + string fileName, + string 
arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + Channel redirectStandardIn, + bool inheritConsoleHandler, + CancellationToken cancellationToken); + + Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + Channel redirectStandardIn, + bool inheritConsoleHandler, + bool keepStandardInOpen, + CancellationToken cancellationToken); + + Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + Channel redirectStandardIn, + bool inheritConsoleHandler, + bool keepStandardInOpen, + bool highPriorityProcess, + CancellationToken cancellationToken); + } + + // The implementation of the process invoker does not hook up DataReceivedEvent and ErrorReceivedEvent of Process, + // instead, we read both STDOUT and STDERR stream manually on seperate thread. + // The reason is we find a huge perf issue about process STDOUT/STDERR with those events. + // + // Missing functionalities: + // 1. Cancel/Kill process tree + // 2. Make sure STDOUT and STDERR not process out of order + public sealed class ProcessInvokerWrapper : RunnerService, IProcessInvoker + { + private ProcessInvoker _invoker; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + _invoker = new ProcessInvoker(Trace); + } + + public event EventHandler OutputDataReceived; + public event EventHandler ErrorDataReceived; + + public Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: false, + cancellationToken: cancellationToken); + } + + public Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: null, + cancellationToken: cancellationToken); + } + + public Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: outputEncoding, + killProcessOnCancel: false, + cancellationToken: cancellationToken); + } + + public Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: outputEncoding, + killProcessOnCancel: killProcessOnCancel, + redirectStandardIn: null, + 
cancellationToken: cancellationToken); + } + + public Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + Channel redirectStandardIn, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: outputEncoding, + killProcessOnCancel: killProcessOnCancel, + redirectStandardIn: redirectStandardIn, + inheritConsoleHandler: false, + cancellationToken: cancellationToken + ); + } + + public Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + Channel redirectStandardIn, + bool inheritConsoleHandler, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: outputEncoding, + killProcessOnCancel: killProcessOnCancel, + redirectStandardIn: redirectStandardIn, + inheritConsoleHandler: inheritConsoleHandler, + keepStandardInOpen: false, + cancellationToken: cancellationToken + ); + } + + public Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + Channel redirectStandardIn, + bool inheritConsoleHandler, + bool keepStandardInOpen, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: outputEncoding, + killProcessOnCancel: killProcessOnCancel, + redirectStandardIn: redirectStandardIn, + inheritConsoleHandler: inheritConsoleHandler, + keepStandardInOpen: keepStandardInOpen, + highPriorityProcess: false, + cancellationToken: cancellationToken + ); + } + + public async Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + Channel redirectStandardIn, + bool inheritConsoleHandler, + bool keepStandardInOpen, + bool highPriorityProcess, + CancellationToken cancellationToken) + { + _invoker.ErrorDataReceived += this.ErrorDataReceived; + _invoker.OutputDataReceived += this.OutputDataReceived; + return await _invoker.ExecuteAsync( + workingDirectory, + fileName, + arguments, + environment, + requireExitCodeZero, + outputEncoding, + killProcessOnCancel, + redirectStandardIn, + inheritConsoleHandler, + keepStandardInOpen, + highPriorityProcess, + cancellationToken); + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + if (_invoker != null) + { + _invoker.Dispose(); + _invoker = null; + } + } + } + } +} diff --git a/src/Runner.Common/Runner.Common.csproj b/src/Runner.Common/Runner.Common.csproj new file mode 100644 index 00000000000..2ebdff76734 --- /dev/null +++ b/src/Runner.Common/Runner.Common.csproj @@ -0,0 +1,68 @@ + + + + netcoreapp2.2 + Library + win-x64;win-x86;linux-x64;linux-arm;rhel.6-x64;osx-x64 + true + 
portable-net45+win8 + NU1701;NU1603 + $(Version) + + + + + + + + + + + + + + + + + portable + + + + OS_WINDOWS;X64;TRACE + + + OS_WINDOWS;X86;TRACE + + + OS_WINDOWS;X64;DEBUG;TRACE + + + OS_WINDOWS;X86;DEBUG;TRACE + + + + OS_OSX;X64;TRACE + + + OS_OSX;DEBUG;X64;TRACE + + + + OS_LINUX;X64;TRACE + + + OS_LINUX;OS_RHEL6;X64;TRACE + + + OS_LINUX;ARM;TRACE + + + OS_LINUX;X64;DEBUG;TRACE + + + OS_LINUX;OS_RHEL6;X64;DEBUG;TRACE + + + OS_LINUX;ARM;DEBUG;TRACE + + diff --git a/src/Runner.Common/RunnerCertificateManager.cs b/src/Runner.Common/RunnerCertificateManager.cs new file mode 100644 index 00000000000..40389a55b67 --- /dev/null +++ b/src/Runner.Common/RunnerCertificateManager.cs @@ -0,0 +1,231 @@ +using System; +using GitHub.Runner.Common.Util; +using System.IO; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using System.Security.Cryptography.X509Certificates; +using System.Net; +using System.Net.Security; +using System.Net.Http; +using GitHub.Services.WebApi; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common +{ + [ServiceLocator(Default = typeof(RunnerCertificateManager))] + public interface IRunnerCertificateManager : IRunnerService + { + bool SkipServerCertificateValidation { get; } + string CACertificateFile { get; } + string ClientCertificateFile { get; } + string ClientCertificatePrivateKeyFile { get; } + string ClientCertificateArchiveFile { get; } + string ClientCertificatePassword { get; } + IVssClientCertificateManager VssClientCertificateManager { get; } + } + + public class RunnerCertificateManager : RunnerService, IRunnerCertificateManager + { + private RunnerClientCertificateManager _runnerClientCertificateManager = new RunnerClientCertificateManager(); + + public bool SkipServerCertificateValidation { private set; get; } + public string CACertificateFile { private set; get; } + public string ClientCertificateFile { private set; get; } + public string ClientCertificatePrivateKeyFile { private set; get; } + public string ClientCertificateArchiveFile { private set; get; } + public string ClientCertificatePassword { private set; get; } + public IVssClientCertificateManager VssClientCertificateManager => _runnerClientCertificateManager; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + LoadCertificateSettings(); + } + + // This should only be called from config + public void SetupCertificate(bool skipCertValidation, string caCert, string clientCert, string clientCertPrivateKey, string clientCertArchive, string clientCertPassword) + { + Trace.Info("Setup runner certificate setting base on configuration inputs."); + + if (skipCertValidation) + { + Trace.Info("Ignore SSL server certificate validation error"); + SkipServerCertificateValidation = true; + VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator; + } + + if (!string.IsNullOrEmpty(caCert)) + { + ArgUtil.File(caCert, nameof(caCert)); + Trace.Info($"Self-Signed CA '{caCert}'"); + } + + if (!string.IsNullOrEmpty(clientCert)) + { + ArgUtil.File(clientCert, nameof(clientCert)); + ArgUtil.File(clientCertPrivateKey, nameof(clientCertPrivateKey)); + ArgUtil.File(clientCertArchive, nameof(clientCertArchive)); + + Trace.Info($"Client cert '{clientCert}'"); + Trace.Info($"Client cert private key '{clientCertPrivateKey}'"); + Trace.Info($"Client cert archive '{clientCertArchive}'"); + } + + CACertificateFile = caCert; + ClientCertificateFile = clientCert; + 
ClientCertificatePrivateKeyFile = clientCertPrivateKey; + ClientCertificateArchiveFile = clientCertArchive; + ClientCertificatePassword = clientCertPassword; + + _runnerClientCertificateManager.AddClientCertificate(ClientCertificateArchiveFile, ClientCertificatePassword); + } + + // This should only be called from config + public void SaveCertificateSetting() + { + string certSettingFile = HostContext.GetConfigFile(WellKnownConfigFile.Certificates); + IOUtil.DeleteFile(certSettingFile); + + var setting = new RunnerCertificateSetting(); + if (SkipServerCertificateValidation) + { + Trace.Info($"Store Skip ServerCertificateValidation setting to '{certSettingFile}'"); + setting.SkipServerCertValidation = true; + } + + if (!string.IsNullOrEmpty(CACertificateFile)) + { + Trace.Info($"Store CA cert setting to '{certSettingFile}'"); + setting.CACert = CACertificateFile; + } + + if (!string.IsNullOrEmpty(ClientCertificateFile) && + !string.IsNullOrEmpty(ClientCertificatePrivateKeyFile) && + !string.IsNullOrEmpty(ClientCertificateArchiveFile)) + { + Trace.Info($"Store client cert settings to '{certSettingFile}'"); + + setting.ClientCert = ClientCertificateFile; + setting.ClientCertPrivatekey = ClientCertificatePrivateKeyFile; + setting.ClientCertArchive = ClientCertificateArchiveFile; + + if (!string.IsNullOrEmpty(ClientCertificatePassword)) + { + string lookupKey = Guid.NewGuid().ToString("D").ToUpperInvariant(); + Trace.Info($"Store client cert private key password with lookup key {lookupKey}"); + + var credStore = HostContext.GetService(); + credStore.Write($"GITHUB_ACTIONS_RUNNER_CLIENT_CERT_PASSWORD_{lookupKey}", "GitHub", ClientCertificatePassword); + + setting.ClientCertPasswordLookupKey = lookupKey; + } + } + + if (SkipServerCertificateValidation || + !string.IsNullOrEmpty(CACertificateFile) || + !string.IsNullOrEmpty(ClientCertificateFile)) + { + IOUtil.SaveObject(setting, certSettingFile); + File.SetAttributes(certSettingFile, File.GetAttributes(certSettingFile) | FileAttributes.Hidden); + } + } + + // This should only be called from unconfig + public void DeleteCertificateSetting() + { + string certSettingFile = HostContext.GetConfigFile(WellKnownConfigFile.Certificates); + if (File.Exists(certSettingFile)) + { + Trace.Info($"Load runner certificate setting from '{certSettingFile}'"); + var certSetting = IOUtil.LoadObject(certSettingFile); + + if (certSetting != null && !string.IsNullOrEmpty(certSetting.ClientCertPasswordLookupKey)) + { + Trace.Info("Delete client cert private key password from credential store."); + var credStore = HostContext.GetService(); + credStore.Delete($"GITHUB_ACTIONS_RUNNER_CLIENT_CERT_PASSWORD_{certSetting.ClientCertPasswordLookupKey}"); + } + + Trace.Info($"Delete cert setting file: {certSettingFile}"); + IOUtil.DeleteFile(certSettingFile); + } + } + + public void LoadCertificateSettings() + { + string certSettingFile = HostContext.GetConfigFile(WellKnownConfigFile.Certificates); + if (File.Exists(certSettingFile)) + { + Trace.Info($"Load runner certificate setting from '{certSettingFile}'"); + var certSetting = IOUtil.LoadObject(certSettingFile); + ArgUtil.NotNull(certSetting, nameof(RunnerCertificateSetting)); + + if (certSetting.SkipServerCertValidation) + { + Trace.Info("Ignore SSL server certificate validation error"); + SkipServerCertificateValidation = true; + VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator; + } + + if (!string.IsNullOrEmpty(certSetting.CACert)) + 
{ + // make sure all settings file exist + ArgUtil.File(certSetting.CACert, nameof(certSetting.CACert)); + Trace.Info($"CA '{certSetting.CACert}'"); + CACertificateFile = certSetting.CACert; + } + + if (!string.IsNullOrEmpty(certSetting.ClientCert)) + { + // make sure all settings file exist + ArgUtil.File(certSetting.ClientCert, nameof(certSetting.ClientCert)); + ArgUtil.File(certSetting.ClientCertPrivatekey, nameof(certSetting.ClientCertPrivatekey)); + ArgUtil.File(certSetting.ClientCertArchive, nameof(certSetting.ClientCertArchive)); + + Trace.Info($"Client cert '{certSetting.ClientCert}'"); + Trace.Info($"Client cert private key '{certSetting.ClientCertPrivatekey}'"); + Trace.Info($"Client cert archive '{certSetting.ClientCertArchive}'"); + + ClientCertificateFile = certSetting.ClientCert; + ClientCertificatePrivateKeyFile = certSetting.ClientCertPrivatekey; + ClientCertificateArchiveFile = certSetting.ClientCertArchive; + + if (!string.IsNullOrEmpty(certSetting.ClientCertPasswordLookupKey)) + { + var cerdStore = HostContext.GetService(); + ClientCertificatePassword = cerdStore.Read($"GITHUB_ACTIONS_RUNNER_CLIENT_CERT_PASSWORD_{certSetting.ClientCertPasswordLookupKey}").Password; + HostContext.SecretMasker.AddValue(ClientCertificatePassword); + } + + _runnerClientCertificateManager.AddClientCertificate(ClientCertificateArchiveFile, ClientCertificatePassword); + } + } + else + { + Trace.Info("No certificate setting found."); + } + } + } + + [DataContract] + internal class RunnerCertificateSetting + { + [DataMember] + public bool SkipServerCertValidation { get; set; } + + [DataMember] + public string CACert { get; set; } + + [DataMember] + public string ClientCert { get; set; } + + [DataMember] + public string ClientCertPrivatekey { get; set; } + + [DataMember] + public string ClientCertArchive { get; set; } + + [DataMember] + public string ClientCertPasswordLookupKey { get; set; } + } +} diff --git a/src/Runner.Common/RunnerCredentialStore.cs b/src/Runner.Common/RunnerCredentialStore.cs new file mode 100644 index 00000000000..1a1520abf76 --- /dev/null +++ b/src/Runner.Common/RunnerCredentialStore.cs @@ -0,0 +1,948 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Net; +using System.Runtime.InteropServices; +using System.Text; +using System.Text.RegularExpressions; +using System.Threading; +using GitHub.Runner.Common.Util; +using Newtonsoft.Json; +using System.IO; +using System.Runtime.Serialization; +using System.Security.Cryptography; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common +{ + // The purpose of this class is to store user's credential during runner configuration and retrive the credential back at runtime. +#if OS_WINDOWS + [ServiceLocator(Default = typeof(WindowsRunnerCredentialStore))] +#elif OS_OSX + [ServiceLocator(Default = typeof(MacOSRunnerCredentialStore))] +#else + [ServiceLocator(Default = typeof(LinuxRunnerCredentialStore))] +#endif + public interface IRunnerCredentialStore : IRunnerService + { + NetworkCredential Write(string target, string username, string password); + + // throw exception when target not found from cred store + NetworkCredential Read(string target); + + // throw exception when target not found from cred store + void Delete(string target); + } + +#if OS_WINDOWS + // Windows credential store is per user. 
+ // This is a limitation for user configure the runner run as windows service, when user's current login account is different with the service run as account. + // Ex: I login the box as domain\admin, configure the runner as windows service and run as domian\buildserver + // domain\buildserver won't read the stored credential from domain\admin's windows credential store. + // To workaround this limitation. + // Anytime we try to save a credential: + // 1. store it into current user's windows credential store + // 2. use DP-API do a machine level encrypt and store the encrypted content on disk. + // At the first time we try to read the credential: + // 1. read from current user's windows credential store, delete the DP-API encrypted backup content on disk if the windows credential store read succeed. + // 2. if credential not found in current user's windows credential store, read from the DP-API encrypted backup content on disk, + // write the credential back the current user's windows credential store and delete the backup on disk. + public sealed class WindowsRunnerCredentialStore : RunnerService, IRunnerCredentialStore + { + private string _credStoreFile; + private Dictionary _credStore; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + + _credStoreFile = hostContext.GetConfigFile(WellKnownConfigFile.CredentialStore); + if (File.Exists(_credStoreFile)) + { + _credStore = IOUtil.LoadObject>(_credStoreFile); + } + else + { + _credStore = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + } + + public NetworkCredential Write(string target, string username, string password) + { + Trace.Entering(); + ArgUtil.NotNullOrEmpty(target, nameof(target)); + ArgUtil.NotNullOrEmpty(username, nameof(username)); + ArgUtil.NotNullOrEmpty(password, nameof(password)); + + // save to .credential_store file first, then Windows credential store + string usernameBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(username)); + string passwordBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(password)); + + // Base64Username:Base64Password -> DP-API machine level encrypt -> Base64Encoding + string encryptedUsernamePassword = Convert.ToBase64String(ProtectedData.Protect(Encoding.UTF8.GetBytes($"{usernameBase64}:{passwordBase64}"), null, DataProtectionScope.LocalMachine)); + Trace.Info($"Credentials for '{target}' written to credential store file."); + _credStore[target] = encryptedUsernamePassword; + + // save to .credential_store file + SyncCredentialStoreFile(); + + // save to Windows Credential Store + return WriteInternal(target, username, password); + } + + public NetworkCredential Read(string target) + { + Trace.Entering(); + ArgUtil.NotNullOrEmpty(target, nameof(target)); + IntPtr credPtr = IntPtr.Zero; + try + { + if (CredRead(target, CredentialType.Generic, 0, out credPtr)) + { + Credential credStruct = (Credential)Marshal.PtrToStructure(credPtr, typeof(Credential)); + int passwordLength = (int)credStruct.CredentialBlobSize; + string password = passwordLength > 0 ? 
Marshal.PtrToStringUni(credStruct.CredentialBlob, passwordLength / sizeof(char)) : String.Empty; + string username = Marshal.PtrToStringUni(credStruct.UserName); + Trace.Info($"Credentials for '{target}' read from windows credential store."); + + // delete from .credential_store file since we are able to read it from windows credential store + if (_credStore.Remove(target)) + { + Trace.Info($"Delete credentials for '{target}' from credential store file."); + SyncCredentialStoreFile(); + } + + return new NetworkCredential(username, password); + } + else + { + // Can't read from Windows Credential Store, fail back to .credential_store file + if (_credStore.ContainsKey(target) && !string.IsNullOrEmpty(_credStore[target])) + { + Trace.Info($"Credentials for '{target}' read from credential store file."); + + // Base64Decode -> DP-API machine level decrypt -> Base64Username:Base64Password -> Base64Decode + string decryptedUsernamePassword = Encoding.UTF8.GetString(ProtectedData.Unprotect(Convert.FromBase64String(_credStore[target]), null, DataProtectionScope.LocalMachine)); + + string[] credential = decryptedUsernamePassword.Split(':'); + if (credential.Length == 2 && !string.IsNullOrEmpty(credential[0]) && !string.IsNullOrEmpty(credential[1])) + { + string username = Encoding.UTF8.GetString(Convert.FromBase64String(credential[0])); + string password = Encoding.UTF8.GetString(Convert.FromBase64String(credential[1])); + + // store back to windows credential store for current user + NetworkCredential creds = WriteInternal(target, username, password); + + // delete from .credential_store file since we are able to write the credential to windows credential store for current user. + if (_credStore.Remove(target)) + { + Trace.Info($"Delete credentials for '{target}' from credential store file."); + SyncCredentialStoreFile(); + } + + return creds; + } + else + { + throw new ArgumentOutOfRangeException(nameof(decryptedUsernamePassword)); + } + } + + throw new Win32Exception(Marshal.GetLastWin32Error(), $"CredRead throw an error for '{target}'"); + } + } + finally + { + if (credPtr != IntPtr.Zero) + { + CredFree(credPtr); + } + } + } + + public void Delete(string target) + { + Trace.Entering(); + ArgUtil.NotNullOrEmpty(target, nameof(target)); + + // remove from .credential_store file + if (_credStore.Remove(target)) + { + Trace.Info($"Delete credentials for '{target}' from credential store file."); + SyncCredentialStoreFile(); + } + + // remove from windows credential store + if (!CredDelete(target, CredentialType.Generic, 0)) + { + throw new Win32Exception(Marshal.GetLastWin32Error(), $"Failed to delete credentials for {target}"); + } + else + { + Trace.Info($"Credentials for '{target}' deleted from windows credential store."); + } + } + + private NetworkCredential WriteInternal(string target, string username, string password) + { + // save to Windows Credential Store + Credential credential = new Credential() + { + Type = CredentialType.Generic, + Persist = (UInt32)CredentialPersist.LocalMachine, + TargetName = Marshal.StringToCoTaskMemUni(target), + UserName = Marshal.StringToCoTaskMemUni(username), + CredentialBlob = Marshal.StringToCoTaskMemUni(password), + CredentialBlobSize = (UInt32)Encoding.Unicode.GetByteCount(password), + AttributeCount = 0, + Comment = IntPtr.Zero, + Attributes = IntPtr.Zero, + TargetAlias = IntPtr.Zero + }; + + try + { + if (CredWrite(ref credential, 0)) + { + Trace.Info($"Credentials for '{target}' written to windows credential store."); + return new 
NetworkCredential(username, password); + } + else + { + int error = Marshal.GetLastWin32Error(); + throw new Win32Exception(error, "Failed to write credentials"); + } + } + finally + { + if (credential.CredentialBlob != IntPtr.Zero) + { + Marshal.FreeCoTaskMem(credential.CredentialBlob); + } + if (credential.TargetName != IntPtr.Zero) + { + Marshal.FreeCoTaskMem(credential.TargetName); + } + if (credential.UserName != IntPtr.Zero) + { + Marshal.FreeCoTaskMem(credential.UserName); + } + } + } + + private void SyncCredentialStoreFile() + { + Trace.Info("Sync in-memory credential store with credential store file."); + + // delete the cred store file first anyway, since it's a readonly file. + IOUtil.DeleteFile(_credStoreFile); + + // delete cred store file when all creds gone + if (_credStore.Count == 0) + { + return; + } + else + { + IOUtil.SaveObject(_credStore, _credStoreFile); + File.SetAttributes(_credStoreFile, File.GetAttributes(_credStoreFile) | FileAttributes.Hidden); + } + } + + [DllImport("Advapi32.dll", EntryPoint = "CredDeleteW", CharSet = CharSet.Unicode, SetLastError = true)] + internal static extern bool CredDelete(string target, CredentialType type, int reservedFlag); + + [DllImport("Advapi32.dll", EntryPoint = "CredReadW", CharSet = CharSet.Unicode, SetLastError = true)] + internal static extern bool CredRead(string target, CredentialType type, int reservedFlag, out IntPtr CredentialPtr); + + [DllImport("Advapi32.dll", EntryPoint = "CredWriteW", CharSet = CharSet.Unicode, SetLastError = true)] + internal static extern bool CredWrite([In] ref Credential userCredential, [In] UInt32 flags); + + [DllImport("Advapi32.dll", EntryPoint = "CredFree", SetLastError = true)] + internal static extern bool CredFree([In] IntPtr cred); + + internal enum CredentialPersist : UInt32 + { + Session = 0x01, + LocalMachine = 0x02 + } + + internal enum CredentialType : uint + { + Generic = 0x01, + DomainPassword = 0x02, + DomainCertificate = 0x03 + } + + [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)] + internal struct Credential + { + public UInt32 Flags; + public CredentialType Type; + public IntPtr TargetName; + public IntPtr Comment; + public System.Runtime.InteropServices.ComTypes.FILETIME LastWritten; + public UInt32 CredentialBlobSize; + public IntPtr CredentialBlob; + public UInt32 Persist; + public UInt32 AttributeCount; + public IntPtr Attributes; + public IntPtr TargetAlias; + public IntPtr UserName; + } + } +#elif OS_OSX + public sealed class MacOSRunnerCredentialStore : RunnerService, IRunnerCredentialStore + { + private const string _osxRunnerCredStoreKeyChainName = "_GITHUB_ACTIONS_RUNNER_CREDSTORE_INTERNAL_"; + + // Keychain requires a password, but this is not intended to add security + private const string _osxRunnerCredStoreKeyChainPassword = "C46F23C36AF94B72B1EAEE32C68670A0"; + + private string _securityUtil; + + private string _runnerCredStoreKeyChain; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + + _securityUtil = WhichUtil.Which("security", true, Trace); + + _runnerCredStoreKeyChain = hostContext.GetConfigFile(WellKnownConfigFile.CredentialStore); + + // Create osx key chain if it doesn't exists. 
+ if (!File.Exists(_runnerCredStoreKeyChain)) + { + List securityOut = new List(); + List securityError = new List(); + object outputLock = new object(); + using (var p = HostContext.CreateService()) + { + p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout) + { + if (!string.IsNullOrEmpty(stdout.Data)) + { + lock (outputLock) + { + securityOut.Add(stdout.Data); + } + } + }; + + p.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr) + { + if (!string.IsNullOrEmpty(stderr.Data)) + { + lock (outputLock) + { + securityError.Add(stderr.Data); + } + } + }; + + // make sure the 'security' has access to the key so we won't get prompt at runtime. + int exitCode = p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root), + fileName: _securityUtil, + arguments: $"create-keychain -p {_osxRunnerCredStoreKeyChainPassword} \"{_runnerCredStoreKeyChain}\"", + environment: null, + cancellationToken: CancellationToken.None).GetAwaiter().GetResult(); + if (exitCode == 0) + { + Trace.Info($"Successfully create-keychain for {_runnerCredStoreKeyChain}"); + } + else + { + if (securityOut.Count > 0) + { + Trace.Error(string.Join(Environment.NewLine, securityOut)); + } + if (securityError.Count > 0) + { + Trace.Error(string.Join(Environment.NewLine, securityError)); + } + + throw new InvalidOperationException($"'security create-keychain' failed with exit code {exitCode}."); + } + } + } + else + { + // Try unlock and lock the keychain, make sure it's still in good stage + UnlockKeyChain(); + LockKeyChain(); + } + } + + public NetworkCredential Write(string target, string username, string password) + { + Trace.Entering(); + ArgUtil.NotNullOrEmpty(target, nameof(target)); + ArgUtil.NotNullOrEmpty(username, nameof(username)); + ArgUtil.NotNullOrEmpty(password, nameof(password)); + + try + { + UnlockKeyChain(); + + // base64encode username + ':' + base64encode password + // OSX keychain requires you provide -s target and -a username to retrieve password + // So, we will trade both username and password as 'secret' store into keychain + string usernameBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(username)); + string passwordBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(password)); + string secretForKeyChain = $"{usernameBase64}:{passwordBase64}"; + + List securityOut = new List(); + List securityError = new List(); + object outputLock = new object(); + using (var p = HostContext.CreateService()) + { + p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout) + { + if (!string.IsNullOrEmpty(stdout.Data)) + { + lock (outputLock) + { + securityOut.Add(stdout.Data); + } + } + }; + + p.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr) + { + if (!string.IsNullOrEmpty(stderr.Data)) + { + lock (outputLock) + { + securityError.Add(stderr.Data); + } + } + }; + + // make sure the 'security' has access to the key so we won't get prompt at runtime. 
+ int exitCode = p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root), + fileName: _securityUtil, + arguments: $"add-generic-password -s {target} -a GITHUBACTIONSRUNNER -w {secretForKeyChain} -T \"{_securityUtil}\" \"{_runnerCredStoreKeyChain}\"", + environment: null, + cancellationToken: CancellationToken.None).GetAwaiter().GetResult(); + if (exitCode == 0) + { + Trace.Info($"Successfully add-generic-password for {target} (GITHUBACTIONSRUNNER)"); + } + else + { + if (securityOut.Count > 0) + { + Trace.Error(string.Join(Environment.NewLine, securityOut)); + } + if (securityError.Count > 0) + { + Trace.Error(string.Join(Environment.NewLine, securityError)); + } + + throw new InvalidOperationException($"'security add-generic-password' failed with exit code {exitCode}."); + } + } + + return new NetworkCredential(username, password); + } + finally + { + LockKeyChain(); + } + } + + public NetworkCredential Read(string target) + { + Trace.Entering(); + ArgUtil.NotNullOrEmpty(target, nameof(target)); + + try + { + UnlockKeyChain(); + + string username; + string password; + + List securityOut = new List(); + List securityError = new List(); + object outputLock = new object(); + using (var p = HostContext.CreateService()) + { + p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout) + { + if (!string.IsNullOrEmpty(stdout.Data)) + { + lock (outputLock) + { + securityOut.Add(stdout.Data); + } + } + }; + + p.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr) + { + if (!string.IsNullOrEmpty(stderr.Data)) + { + lock (outputLock) + { + securityError.Add(stderr.Data); + } + } + }; + + int exitCode = p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root), + fileName: _securityUtil, + arguments: $"find-generic-password -s {target} -a GITHUBACTIONSRUNNER -w -g \"{_runnerCredStoreKeyChain}\"", + environment: null, + cancellationToken: CancellationToken.None).GetAwaiter().GetResult(); + if (exitCode == 0) + { + string keyChainSecret = securityOut.First(); + string[] secrets = keyChainSecret.Split(':'); + if (secrets.Length == 2 && !string.IsNullOrEmpty(secrets[0]) && !string.IsNullOrEmpty(secrets[1])) + { + Trace.Info($"Successfully find-generic-password for {target} (GITHUBACTIONSRUNNER)"); + username = Encoding.UTF8.GetString(Convert.FromBase64String(secrets[0])); + password = Encoding.UTF8.GetString(Convert.FromBase64String(secrets[1])); + return new NetworkCredential(username, password); + } + else + { + throw new ArgumentOutOfRangeException(nameof(keyChainSecret)); + } + } + else + { + if (securityOut.Count > 0) + { + Trace.Error(string.Join(Environment.NewLine, securityOut)); + } + if (securityError.Count > 0) + { + Trace.Error(string.Join(Environment.NewLine, securityError)); + } + + throw new InvalidOperationException($"'security find-generic-password' failed with exit code {exitCode}."); + } + } + } + finally + { + LockKeyChain(); + } + } + + public void Delete(string target) + { + Trace.Entering(); + ArgUtil.NotNullOrEmpty(target, nameof(target)); + + try + { + UnlockKeyChain(); + + List securityOut = new List(); + List securityError = new List(); + object outputLock = new object(); + + using (var p = HostContext.CreateService()) + { + p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout) + { + if (!string.IsNullOrEmpty(stdout.Data)) + { + lock (outputLock) + { + securityOut.Add(stdout.Data); + } + } + }; + + p.ErrorDataReceived += delegate (object 
sender, ProcessDataReceivedEventArgs stderr) + { + if (!string.IsNullOrEmpty(stderr.Data)) + { + lock (outputLock) + { + securityError.Add(stderr.Data); + } + } + }; + + int exitCode = p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root), + fileName: _securityUtil, + arguments: $"delete-generic-password -s {target} -a GITHUBACTIONSRUNNER \"{_runnerCredStoreKeyChain}\"", + environment: null, + cancellationToken: CancellationToken.None).GetAwaiter().GetResult(); + if (exitCode == 0) + { + Trace.Info($"Successfully delete-generic-password for {target} (GITHUBACTIONSRUNNER)"); + } + else + { + if (securityOut.Count > 0) + { + Trace.Error(string.Join(Environment.NewLine, securityOut)); + } + if (securityError.Count > 0) + { + Trace.Error(string.Join(Environment.NewLine, securityError)); + } + + throw new InvalidOperationException($"'security delete-generic-password' failed with exit code {exitCode}."); + } + } + } + finally + { + LockKeyChain(); + } + } + + private void UnlockKeyChain() + { + Trace.Entering(); + ArgUtil.NotNullOrEmpty(_securityUtil, nameof(_securityUtil)); + ArgUtil.NotNullOrEmpty(_runnerCredStoreKeyChain, nameof(_runnerCredStoreKeyChain)); + + List securityOut = new List(); + List securityError = new List(); + object outputLock = new object(); + using (var p = HostContext.CreateService()) + { + p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout) + { + if (!string.IsNullOrEmpty(stdout.Data)) + { + lock (outputLock) + { + securityOut.Add(stdout.Data); + } + } + }; + + p.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr) + { + if (!string.IsNullOrEmpty(stderr.Data)) + { + lock (outputLock) + { + securityError.Add(stderr.Data); + } + } + }; + + // make sure the 'security' has access to the key so we won't get prompt at runtime. + int exitCode = p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root), + fileName: _securityUtil, + arguments: $"unlock-keychain -p {_osxRunnerCredStoreKeyChainPassword} \"{_runnerCredStoreKeyChain}\"", + environment: null, + cancellationToken: CancellationToken.None).GetAwaiter().GetResult(); + if (exitCode == 0) + { + Trace.Info($"Successfully unlock-keychain for {_runnerCredStoreKeyChain}"); + } + else + { + if (securityOut.Count > 0) + { + Trace.Error(string.Join(Environment.NewLine, securityOut)); + } + if (securityError.Count > 0) + { + Trace.Error(string.Join(Environment.NewLine, securityError)); + } + + throw new InvalidOperationException($"'security unlock-keychain' failed with exit code {exitCode}."); + } + } + } + + private void LockKeyChain() + { + Trace.Entering(); + ArgUtil.NotNullOrEmpty(_securityUtil, nameof(_securityUtil)); + ArgUtil.NotNullOrEmpty(_runnerCredStoreKeyChain, nameof(_runnerCredStoreKeyChain)); + + List securityOut = new List(); + List securityError = new List(); + object outputLock = new object(); + using (var p = HostContext.CreateService()) + { + p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout) + { + if (!string.IsNullOrEmpty(stdout.Data)) + { + lock (outputLock) + { + securityOut.Add(stdout.Data); + } + } + }; + + p.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr) + { + if (!string.IsNullOrEmpty(stderr.Data)) + { + lock (outputLock) + { + securityError.Add(stderr.Data); + } + } + }; + + // make sure the 'security' has access to the key so we won't get prompt at runtime. 
+ int exitCode = p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root), + fileName: _securityUtil, + arguments: $"lock-keychain \"{_runnerCredStoreKeyChain}\"", + environment: null, + cancellationToken: CancellationToken.None).GetAwaiter().GetResult(); + if (exitCode == 0) + { + Trace.Info($"Successfully lock-keychain for {_runnerCredStoreKeyChain}"); + } + else + { + if (securityOut.Count > 0) + { + Trace.Error(string.Join(Environment.NewLine, securityOut)); + } + if (securityError.Count > 0) + { + Trace.Error(string.Join(Environment.NewLine, securityError)); + } + + throw new InvalidOperationException($"'security lock-keychain' failed with exit code {exitCode}."); + } + } + } + } +#else + public sealed class LinuxRunnerCredentialStore : RunnerService, IRunnerCredentialStore + { + // 'ghrunner' 128 bits iv + private readonly byte[] iv = new byte[] { 0x67, 0x68, 0x72, 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x67, 0x68, 0x72, 0x75, 0x6e, 0x6e, 0x65, 0x72 }; + + // 256 bits key + private byte[] _symmetricKey; + private string _credStoreFile; + private Dictionary _credStore; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + + _credStoreFile = hostContext.GetConfigFile(WellKnownConfigFile.CredentialStore); + if (File.Exists(_credStoreFile)) + { + _credStore = IOUtil.LoadObject>(_credStoreFile); + } + else + { + _credStore = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + string machineId; + if (File.Exists("/etc/machine-id")) + { + // try use machine-id as encryption key + // this helps avoid accidental information disclosure, but isn't intended for true security + machineId = File.ReadAllLines("/etc/machine-id").FirstOrDefault(); + Trace.Info($"machine-id length {machineId?.Length ?? 
0}."); + + // machine-id doesn't exist or machine-id is not 256 bits + if (string.IsNullOrEmpty(machineId) || machineId.Length != 32) + { + Trace.Warning("Can not get valid machine id from '/etc/machine-id'."); + machineId = "43e7fe5da07740cf914b90f1dac51c2a"; + } + } + else + { + // /etc/machine-id not exist + Trace.Warning("/etc/machine-id doesn't exist."); + machineId = "43e7fe5da07740cf914b90f1dac51c2a"; + } + + List keyBuilder = new List(); + foreach (var c in machineId) + { + keyBuilder.Add(Convert.ToByte(c)); + } + + _symmetricKey = keyBuilder.ToArray(); + } + + public NetworkCredential Write(string target, string username, string password) + { + Trace.Entering(); + ArgUtil.NotNullOrEmpty(target, nameof(target)); + ArgUtil.NotNullOrEmpty(username, nameof(username)); + ArgUtil.NotNullOrEmpty(password, nameof(password)); + + Trace.Info($"Store credential for '{target}' to cred store."); + Credential cred = new Credential(username, Encrypt(password)); + _credStore[target] = cred; + SyncCredentialStoreFile(); + return new NetworkCredential(username, password); + } + + public NetworkCredential Read(string target) + { + Trace.Entering(); + ArgUtil.NotNullOrEmpty(target, nameof(target)); + Trace.Info($"Read credential for '{target}' from cred store."); + if (_credStore.ContainsKey(target)) + { + Credential cred = _credStore[target]; + if (!string.IsNullOrEmpty(cred.UserName) && !string.IsNullOrEmpty(cred.Password)) + { + Trace.Info($"Return credential for '{target}' from cred store."); + return new NetworkCredential(cred.UserName, Decrypt(cred.Password)); + } + } + + throw new KeyNotFoundException(target); + } + + public void Delete(string target) + { + Trace.Entering(); + ArgUtil.NotNullOrEmpty(target, nameof(target)); + + if (_credStore.ContainsKey(target)) + { + Trace.Info($"Delete credential for '{target}' from cred store."); + _credStore.Remove(target); + SyncCredentialStoreFile(); + } + else + { + throw new KeyNotFoundException(target); + } + } + + private void SyncCredentialStoreFile() + { + Trace.Entering(); + Trace.Info("Sync in-memory credential store with credential store file."); + + // delete cred store file when all creds gone + if (_credStore.Count == 0) + { + IOUtil.DeleteFile(_credStoreFile); + return; + } + + if (!File.Exists(_credStoreFile)) + { + CreateCredentialStoreFile(); + } + + IOUtil.SaveObject(_credStore, _credStoreFile); + } + + private string Encrypt(string secret) + { + using (Aes aes = Aes.Create()) + { + aes.Key = _symmetricKey; + aes.IV = iv; + + // Create a decrytor to perform the stream transform. + ICryptoTransform encryptor = aes.CreateEncryptor(); + + // Create the streams used for encryption. + using (MemoryStream msEncrypt = new MemoryStream()) + { + using (CryptoStream csEncrypt = new CryptoStream(msEncrypt, encryptor, CryptoStreamMode.Write)) + { + using (StreamWriter swEncrypt = new StreamWriter(csEncrypt)) + { + swEncrypt.Write(secret); + } + + return Convert.ToBase64String(msEncrypt.ToArray()); + } + } + } + } + + private string Decrypt(string encryptedText) + { + using (Aes aes = Aes.Create()) + { + aes.Key = _symmetricKey; + aes.IV = iv; + + // Create a decrytor to perform the stream transform. + ICryptoTransform decryptor = aes.CreateDecryptor(); + + // Create the streams used for decryption. 
+ using (MemoryStream msDecrypt = new MemoryStream(Convert.FromBase64String(encryptedText))) + { + using (CryptoStream csDecrypt = new CryptoStream(msDecrypt, decryptor, CryptoStreamMode.Read)) + { + using (StreamReader srDecrypt = new StreamReader(csDecrypt)) + { + // Read the decrypted bytes from the decrypting stream and place them in a string. + return srDecrypt.ReadToEnd(); + } + } + } + } + } + + private void CreateCredentialStoreFile() + { + File.WriteAllText(_credStoreFile, ""); + File.SetAttributes(_credStoreFile, File.GetAttributes(_credStoreFile) | FileAttributes.Hidden); + + // Try to lock down the .credentials_store file to the owner/group + var chmodPath = WhichUtil.Which("chmod", trace: Trace); + if (!String.IsNullOrEmpty(chmodPath)) + { + var arguments = $"600 {new FileInfo(_credStoreFile).FullName}"; + using (var invoker = HostContext.CreateService()) + { + var exitCode = invoker.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), chmodPath, arguments, null, default(CancellationToken)).GetAwaiter().GetResult(); + if (exitCode == 0) + { + Trace.Info("Successfully set permissions for credentials store file {0}", _credStoreFile); + } + else + { + Trace.Warning("Unable to successfully set permissions for credentials store file {0}. Received exit code {1} from {2}", _credStoreFile, exitCode, chmodPath); + } + } + } + else + { + Trace.Warning("Unable to locate chmod to set permissions for credentials store file {0}.", _credStoreFile); + } + } + } + + [DataContract] + internal class Credential + { + public Credential() + { } + + public Credential(string userName, string password) + { + UserName = userName; + Password = password; + } + + [DataMember(IsRequired = true)] + public string UserName { get; set; } + + [DataMember(IsRequired = true)] + public string Password { get; set; } + } +#endif +} diff --git a/src/Runner.Common/RunnerServer.cs b/src/Runner.Common/RunnerServer.cs new file mode 100644 index 00000000000..d987afc70d9 --- /dev/null +++ b/src/Runner.Common/RunnerServer.cs @@ -0,0 +1,355 @@ +using GitHub.DistributedTask.WebApi; +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Runner.Common.Util; +using GitHub.Services.WebApi; +using GitHub.Services.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common +{ + public enum RunnerConnectionType + { + Generic, + MessageQueue, + JobRequest + } + + [ServiceLocator(Default = typeof(RunnerServer))] + public interface IRunnerServer : IRunnerService + { + Task ConnectAsync(Uri serverUrl, VssCredentials credentials); + + Task RefreshConnectionAsync(RunnerConnectionType connectionType, TimeSpan timeout); + + void SetConnectionTimeout(RunnerConnectionType connectionType, TimeSpan timeout); + + // Configuration + Task AddAgentAsync(Int32 agentPoolId, TaskAgent agent); + Task DeleteAgentAsync(int agentPoolId, int agentId); + Task> GetAgentPoolsAsync(string agentPoolName = null, TaskAgentPoolType poolType = TaskAgentPoolType.Automation); + Task> GetAgentsAsync(int agentPoolId, string agentName = null); + Task UpdateAgentAsync(int agentPoolId, TaskAgent agent); + + // messagequeue + Task CreateAgentSessionAsync(Int32 poolId, TaskAgentSession session, CancellationToken cancellationToken); + Task DeleteAgentMessageAsync(Int32 poolId, Int64 messageId, Guid sessionId, CancellationToken cancellationToken); + Task DeleteAgentSessionAsync(Int32 poolId, Guid sessionId, CancellationToken cancellationToken); + Task GetAgentMessageAsync(Int32 poolId, Guid 
sessionId, Int64? lastMessageId, CancellationToken cancellationToken); + + // job request + Task GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken); + Task RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken cancellationToken); + Task FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken); + + // agent package + Task> GetPackagesAsync(string packageType, string platform, int top, CancellationToken cancellationToken); + Task GetPackageAsync(string packageType, string platform, string version, CancellationToken cancellationToken); + + // agent update + Task UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState); + } + + public sealed class RunnerServer : RunnerService, IRunnerServer + { + private bool _hasGenericConnection; + private bool _hasMessageConnection; + private bool _hasRequestConnection; + private VssConnection _genericConnection; + private VssConnection _messageConnection; + private VssConnection _requestConnection; + private TaskAgentHttpClient _genericTaskAgentClient; + private TaskAgentHttpClient _messageTaskAgentClient; + private TaskAgentHttpClient _requestTaskAgentClient; + + public async Task ConnectAsync(Uri serverUrl, VssCredentials credentials) + { + if (HostContext.RunMode == RunMode.Local) + { + return; + } + + var createGenericConnection = EstablishVssConnection(serverUrl, credentials, TimeSpan.FromSeconds(100)); + var createMessageConnection = EstablishVssConnection(serverUrl, credentials, TimeSpan.FromSeconds(60)); + var createRequestConnection = EstablishVssConnection(serverUrl, credentials, TimeSpan.FromSeconds(60)); + + await Task.WhenAll(createGenericConnection, createMessageConnection, createRequestConnection); + + _genericConnection = await createGenericConnection; + _messageConnection = await createMessageConnection; + _requestConnection = await createRequestConnection; + + _genericTaskAgentClient = _genericConnection.GetClient(); + _messageTaskAgentClient = _messageConnection.GetClient(); + _requestTaskAgentClient = _requestConnection.GetClient(); + + _hasGenericConnection = true; + _hasMessageConnection = true; + _hasRequestConnection = true; + } + + // Refresh connection is best effort. 
it should never throw exception + public async Task RefreshConnectionAsync(RunnerConnectionType connectionType, TimeSpan timeout) + { + Trace.Info($"Refresh {connectionType} VssConnection to get on a different AFD node."); + VssConnection newConnection = null; + switch (connectionType) + { + case RunnerConnectionType.MessageQueue: + try + { + _hasMessageConnection = false; + newConnection = await EstablishVssConnection(_messageConnection.Uri, _messageConnection.Credentials, timeout); + var client = newConnection.GetClient(); + _messageConnection = newConnection; + _messageTaskAgentClient = client; + } + catch (Exception ex) + { + Trace.Error($"Catch exception during reset {connectionType} connection."); + Trace.Error(ex); + newConnection?.Dispose(); + } + finally + { + _hasMessageConnection = true; + } + break; + case RunnerConnectionType.JobRequest: + try + { + _hasRequestConnection = false; + newConnection = await EstablishVssConnection(_requestConnection.Uri, _requestConnection.Credentials, timeout); + var client = newConnection.GetClient(); + _requestConnection = newConnection; + _requestTaskAgentClient = client; + } + catch (Exception ex) + { + Trace.Error($"Catch exception during reset {connectionType} connection."); + Trace.Error(ex); + newConnection?.Dispose(); + } + finally + { + _hasRequestConnection = true; + } + break; + case RunnerConnectionType.Generic: + try + { + _hasGenericConnection = false; + newConnection = await EstablishVssConnection(_genericConnection.Uri, _genericConnection.Credentials, timeout); + var client = newConnection.GetClient(); + _genericConnection = newConnection; + _genericTaskAgentClient = client; + } + catch (Exception ex) + { + Trace.Error($"Catch exception during reset {connectionType} connection."); + Trace.Error(ex); + newConnection?.Dispose(); + } + finally + { + _hasGenericConnection = true; + } + break; + default: + Trace.Error($"Unexpected connection type: {connectionType}."); + break; + } + } + + public void SetConnectionTimeout(RunnerConnectionType connectionType, TimeSpan timeout) + { + Trace.Info($"Set {connectionType} VssConnection's timeout to {timeout.TotalSeconds} seconds."); + switch (connectionType) + { + case RunnerConnectionType.JobRequest: + _requestConnection.Settings.SendTimeout = timeout; + break; + case RunnerConnectionType.MessageQueue: + _messageConnection.Settings.SendTimeout = timeout; + break; + case RunnerConnectionType.Generic: + _genericConnection.Settings.SendTimeout = timeout; + break; + default: + Trace.Error($"Unexpected connection type: {connectionType}."); + break; + } + } + + private async Task EstablishVssConnection(Uri serverUrl, VssCredentials credentials, TimeSpan timeout) + { + Trace.Info($"Establish connection with {timeout.TotalSeconds} seconds timeout."); + int attemptCount = 5; + while (attemptCount-- > 0) + { + var connection = VssUtil.CreateConnection(serverUrl, credentials, timeout: timeout); + try + { + await connection.ConnectAsync(); + return connection; + } + catch (Exception ex) when (attemptCount > 0) + { + Trace.Info($"Catch exception during connect. {attemptCount} attempt left."); + Trace.Error(ex); + + await HostContext.Delay(TimeSpan.FromMilliseconds(100), CancellationToken.None); + } + } + + // should never reach here. 
+ throw new InvalidOperationException(nameof(EstablishVssConnection)); + } + + private void CheckConnection(RunnerConnectionType connectionType) + { + switch (connectionType) + { + case RunnerConnectionType.Generic: + if (!_hasGenericConnection) + { + throw new InvalidOperationException($"SetConnection {RunnerConnectionType.Generic}"); + } + break; + case RunnerConnectionType.JobRequest: + if (!_hasRequestConnection) + { + throw new InvalidOperationException($"SetConnection {RunnerConnectionType.JobRequest}"); + } + break; + case RunnerConnectionType.MessageQueue: + if (!_hasMessageConnection) + { + throw new InvalidOperationException($"SetConnection {RunnerConnectionType.MessageQueue}"); + } + break; + default: + throw new NotSupportedException(connectionType.ToString()); + } + } + + //----------------------------------------------------------------- + // Configuration + //----------------------------------------------------------------- + + public Task> GetAgentPoolsAsync(string agentPoolName = null, TaskAgentPoolType poolType = TaskAgentPoolType.Automation) + { + CheckConnection(RunnerConnectionType.Generic); + return _genericTaskAgentClient.GetAgentPoolsAsync(agentPoolName, poolType: poolType); + } + + public Task AddAgentAsync(Int32 agentPoolId, TaskAgent agent) + { + CheckConnection(RunnerConnectionType.Generic); + return _genericTaskAgentClient.AddAgentAsync(agentPoolId, agent); + } + + public Task> GetAgentsAsync(int agentPoolId, string agentName = null) + { + CheckConnection(RunnerConnectionType.Generic); + return _genericTaskAgentClient.GetAgentsAsync(agentPoolId, agentName, false); + } + + public Task UpdateAgentAsync(int agentPoolId, TaskAgent agent) + { + CheckConnection(RunnerConnectionType.Generic); + return _genericTaskAgentClient.ReplaceAgentAsync(agentPoolId, agent); + } + + public Task DeleteAgentAsync(int agentPoolId, int agentId) + { + CheckConnection(RunnerConnectionType.Generic); + return _genericTaskAgentClient.DeleteAgentAsync(agentPoolId, agentId); + } + + //----------------------------------------------------------------- + // MessageQueue + //----------------------------------------------------------------- + + public Task CreateAgentSessionAsync(Int32 poolId, TaskAgentSession session, CancellationToken cancellationToken) + { + CheckConnection(RunnerConnectionType.MessageQueue); + return _messageTaskAgentClient.CreateAgentSessionAsync(poolId, session, cancellationToken: cancellationToken); + } + + public Task DeleteAgentMessageAsync(Int32 poolId, Int64 messageId, Guid sessionId, CancellationToken cancellationToken) + { + CheckConnection(RunnerConnectionType.MessageQueue); + return _messageTaskAgentClient.DeleteMessageAsync(poolId, messageId, sessionId, cancellationToken: cancellationToken); + } + + public Task DeleteAgentSessionAsync(Int32 poolId, Guid sessionId, CancellationToken cancellationToken) + { + CheckConnection(RunnerConnectionType.MessageQueue); + return _messageTaskAgentClient.DeleteAgentSessionAsync(poolId, sessionId, cancellationToken: cancellationToken); + } + + public Task GetAgentMessageAsync(Int32 poolId, Guid sessionId, Int64? 
lastMessageId, CancellationToken cancellationToken) + { + CheckConnection(RunnerConnectionType.MessageQueue); + return _messageTaskAgentClient.GetMessageAsync(poolId, sessionId, lastMessageId, cancellationToken: cancellationToken); + } + + //----------------------------------------------------------------- + // JobRequest + //----------------------------------------------------------------- + + public Task RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken cancellationToken = default(CancellationToken)) + { + if (HostContext.RunMode == RunMode.Local) + { + return Task.FromResult(JsonUtility.FromString("{ lockedUntil: \"" + DateTime.Now.Add(TimeSpan.FromMinutes(5)).ToString("u") + "\" }")); + } + + CheckConnection(RunnerConnectionType.JobRequest); + return _requestTaskAgentClient.RenewAgentRequestAsync(poolId, requestId, lockToken, cancellationToken: cancellationToken); + } + + public Task FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken = default(CancellationToken)) + { + if (HostContext.RunMode == RunMode.Local) + { + return Task.FromResult(null); + } + + CheckConnection(RunnerConnectionType.JobRequest); + return _requestTaskAgentClient.FinishAgentRequestAsync(poolId, requestId, lockToken, finishTime, result, cancellationToken: cancellationToken); + } + + public Task GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken = default(CancellationToken)) + { + ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); + CheckConnection(RunnerConnectionType.JobRequest); + return _requestTaskAgentClient.GetAgentRequestAsync(poolId, requestId, cancellationToken: cancellationToken); + } + + //----------------------------------------------------------------- + // Agent Package + //----------------------------------------------------------------- + public Task> GetPackagesAsync(string packageType, string platform, int top, CancellationToken cancellationToken) + { + ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); + CheckConnection(RunnerConnectionType.Generic); + return _genericTaskAgentClient.GetPackagesAsync(packageType, platform, top, cancellationToken: cancellationToken); + } + + public Task GetPackageAsync(string packageType, string platform, string version, CancellationToken cancellationToken) + { + CheckConnection(RunnerConnectionType.Generic); + return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, cancellationToken: cancellationToken); + } + + public Task UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState) + { + CheckConnection(RunnerConnectionType.Generic); + return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState); + } + } +} diff --git a/src/Runner.Common/RunnerService.cs b/src/Runner.Common/RunnerService.cs new file mode 100644 index 00000000000..7d4f52ea0a0 --- /dev/null +++ b/src/Runner.Common/RunnerService.cs @@ -0,0 +1,39 @@ +using System; + +namespace GitHub.Runner.Common +{ + + [AttributeUsage(AttributeTargets.Interface, Inherited = false, AllowMultiple = false)] + public sealed class ServiceLocatorAttribute : Attribute + { + public static readonly string DefaultPropertyName = "Default"; + + public Type Default { get; set; } + } + + public interface IRunnerService + { + void Initialize(IHostContext context); + } + + public abstract class RunnerService + { + protected IHostContext 
HostContext { get; private set; } + protected Tracing Trace { get; private set; } + + public string TraceName + { + get + { + return GetType().Name; + } + } + + public virtual void Initialize(IHostContext hostContext) + { + HostContext = hostContext; + Trace = HostContext.GetTrace(TraceName); + Trace.Entering(); + } + } +} diff --git a/src/Runner.Common/RunnerWebProxy.cs b/src/Runner.Common/RunnerWebProxy.cs new file mode 100644 index 00000000000..ecb12fe1fba --- /dev/null +++ b/src/Runner.Common/RunnerWebProxy.cs @@ -0,0 +1,196 @@ +using GitHub.Runner.Common.Util; +using System; +using System.Linq; +using System.Net; +using System.IO; +using System.Collections.Generic; +using System.Text.RegularExpressions; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common +{ + [ServiceLocator(Default = typeof(RunnerWebProxy))] + public interface IRunnerWebProxy : IRunnerService + { + string ProxyAddress { get; } + string ProxyUsername { get; } + string ProxyPassword { get; } + List ProxyBypassList { get; } + IWebProxy WebProxy { get; } + } + + public class RunnerWebProxy : RunnerService, IRunnerWebProxy + { + private readonly List _regExBypassList = new List(); + private readonly List _bypassList = new List(); + private RunnerWebProxyCore _runnerWebProxy = new RunnerWebProxyCore(); + + public string ProxyAddress { get; private set; } + public string ProxyUsername { get; private set; } + public string ProxyPassword { get; private set; } + public List ProxyBypassList => _bypassList; + public IWebProxy WebProxy => _runnerWebProxy; + + public override void Initialize(IHostContext context) + { + base.Initialize(context); + LoadProxySetting(); + } + + // This should only be called from config + public void SetupProxy(string proxyAddress, string proxyUsername, string proxyPassword) + { + ArgUtil.NotNullOrEmpty(proxyAddress, nameof(proxyAddress)); + Trace.Info($"Update proxy setting from '{ProxyAddress ?? 
string.Empty}' to '{proxyAddress}'"); + ProxyAddress = proxyAddress; + ProxyUsername = proxyUsername; + ProxyPassword = proxyPassword; + + if (string.IsNullOrEmpty(ProxyUsername) || string.IsNullOrEmpty(ProxyPassword)) + { + Trace.Info($"Config proxy use DefaultNetworkCredentials."); + } + else + { + Trace.Info($"Config authentication proxy as: {ProxyUsername}."); + } + + _runnerWebProxy.Update(ProxyAddress, ProxyUsername, ProxyPassword, ProxyBypassList); + } + + // This should only be called from config + public void SaveProxySetting() + { + if (!string.IsNullOrEmpty(ProxyAddress)) + { + string proxyConfigFile = HostContext.GetConfigFile(WellKnownConfigFile.Proxy); + IOUtil.DeleteFile(proxyConfigFile); + Trace.Info($"Store proxy configuration to '{proxyConfigFile}' for proxy '{ProxyAddress}'"); + File.WriteAllText(proxyConfigFile, ProxyAddress); + File.SetAttributes(proxyConfigFile, File.GetAttributes(proxyConfigFile) | FileAttributes.Hidden); + + string proxyCredFile = HostContext.GetConfigFile(WellKnownConfigFile.ProxyCredentials); + IOUtil.DeleteFile(proxyCredFile); + if (!string.IsNullOrEmpty(ProxyUsername) && !string.IsNullOrEmpty(ProxyPassword)) + { + string lookupKey = Guid.NewGuid().ToString("D").ToUpperInvariant(); + Trace.Info($"Store proxy credential lookup key '{lookupKey}' to '{proxyCredFile}'"); + File.WriteAllText(proxyCredFile, lookupKey); + File.SetAttributes(proxyCredFile, File.GetAttributes(proxyCredFile) | FileAttributes.Hidden); + + var credStore = HostContext.GetService(); + credStore.Write($"GITHUB_ACTIONS_RUNNER_PROXY_{lookupKey}", ProxyUsername, ProxyPassword); + } + } + else + { + Trace.Info("No proxy configuration exists."); + } + } + + // This should only be called from unconfig + public void DeleteProxySetting() + { + string proxyCredFile = HostContext.GetConfigFile(WellKnownConfigFile.ProxyCredentials); + if (File.Exists(proxyCredFile)) + { + Trace.Info("Delete proxy credential from credential store."); + string lookupKey = File.ReadAllLines(proxyCredFile).FirstOrDefault(); + if (!string.IsNullOrEmpty(lookupKey)) + { + var credStore = HostContext.GetService(); + credStore.Delete($"GITHUB_ACTIONS_RUNNER_PROXY_{lookupKey}"); + } + + Trace.Info($"Delete .proxycredentials file: {proxyCredFile}"); + IOUtil.DeleteFile(proxyCredFile); + } + + string proxyBypassFile = HostContext.GetConfigFile(WellKnownConfigFile.ProxyBypass); + if (File.Exists(proxyBypassFile)) + { + Trace.Info($"Delete .proxybypass file: {proxyBypassFile}"); + IOUtil.DeleteFile(proxyBypassFile); + } + + string proxyConfigFile = HostContext.GetConfigFile(WellKnownConfigFile.Proxy); + Trace.Info($"Delete .proxy file: {proxyConfigFile}"); + IOUtil.DeleteFile(proxyConfigFile); + } + + private void LoadProxySetting() + { + string proxyConfigFile = HostContext.GetConfigFile(WellKnownConfigFile.Proxy); + if (File.Exists(proxyConfigFile)) + { + // We expect the first line of the file to be the proxy url + Trace.Verbose($"Try read proxy setting from file: {proxyConfigFile}."); + ProxyAddress = File.ReadLines(proxyConfigFile).FirstOrDefault() ??
string.Empty; + ProxyAddress = ProxyAddress.Trim(); + Trace.Verbose($"{ProxyAddress}"); + } + + if (!string.IsNullOrEmpty(ProxyAddress) && !Uri.IsWellFormedUriString(ProxyAddress, UriKind.Absolute)) + { + Trace.Info($"The proxy url is not a well formed absolute uri string: {ProxyAddress}."); + ProxyAddress = string.Empty; + } + + if (!string.IsNullOrEmpty(ProxyAddress)) + { + Trace.Info($"Config proxy at: {ProxyAddress}."); + + string proxyCredFile = HostContext.GetConfigFile(WellKnownConfigFile.ProxyCredentials); + if (File.Exists(proxyCredFile)) + { + string lookupKey = File.ReadAllLines(proxyCredFile).FirstOrDefault(); + if (!string.IsNullOrEmpty(lookupKey)) + { + var credStore = HostContext.GetService(); + var proxyCred = credStore.Read($"GITHUB_ACTIONS_RUNNER_PROXY_{lookupKey}"); + ProxyUsername = proxyCred.UserName; + ProxyPassword = proxyCred.Password; + } + } + + if (!string.IsNullOrEmpty(ProxyPassword)) + { + HostContext.SecretMasker.AddValue(ProxyPassword); + } + + if (string.IsNullOrEmpty(ProxyUsername) || string.IsNullOrEmpty(ProxyPassword)) + { + Trace.Info($"Config proxy use DefaultNetworkCredentials."); + } + else + { + Trace.Info($"Config authentication proxy as: {ProxyUsername}."); + } + + string proxyBypassFile = HostContext.GetConfigFile(WellKnownConfigFile.ProxyBypass); + if (File.Exists(proxyBypassFile)) + { + Trace.Verbose($"Try read proxy bypass list from file: {proxyBypassFile}."); + foreach (string bypass in File.ReadAllLines(proxyBypassFile)) + { + if (string.IsNullOrWhiteSpace(bypass)) + { + continue; + } + else + { + Trace.Info($"Bypass proxy for: {bypass}."); + ProxyBypassList.Add(bypass.Trim()); + } + } + } + + _runnerWebProxy.Update(ProxyAddress, ProxyUsername, ProxyPassword, ProxyBypassList); + } + else + { + Trace.Info($"No proxy setting found."); + } + } + } +} diff --git a/src/Runner.Common/StreamString.cs b/src/Runner.Common/StreamString.cs new file mode 100644 index 00000000000..b09839cb25d --- /dev/null +++ b/src/Runner.Common/StreamString.cs @@ -0,0 +1,96 @@ +// Defines the data protocol for reading and writing strings on our stream +using System; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Tasks; + +namespace GitHub.Runner.Common +{ + public class StreamString + { + private Stream _ioStream; + private UnicodeEncoding streamEncoding; + + public StreamString(Stream ioStream) + { + _ioStream = ioStream; + streamEncoding = new UnicodeEncoding(); + } + + public async Task ReadInt32Async(CancellationToken cancellationToken) + { + byte[] readBytes = new byte[sizeof(Int32)]; + int dataread = 0; + while (sizeof(Int32) - dataread > 0 && (!cancellationToken.IsCancellationRequested)) + { + Task op = _ioStream.ReadAsync(readBytes, dataread, sizeof(Int32) - dataread, cancellationToken); + int newData = 0; + newData = await op.WithCancellation(cancellationToken); + dataread += newData; + if (0 == newData) + { + await Task.Delay(100, cancellationToken); + } + } + + cancellationToken.ThrowIfCancellationRequested(); + return BitConverter.ToInt32(readBytes, 0); + } + + public async Task WriteInt32Async(Int32 value, CancellationToken cancellationToken) + { + byte[] int32Bytes = BitConverter.GetBytes(value); + Task op = _ioStream.WriteAsync(int32Bytes, 0, sizeof(Int32), cancellationToken); + await op.WithCancellation(cancellationToken); + } + + const int MaxStringSize = 50 * 1000000; + + public async Task ReadStringAsync(CancellationToken cancellationToken) + { + Int32 len = await ReadInt32Async(cancellationToken); + if (len == 
0) + { + return string.Empty; + } + if (len < 0 || len > MaxStringSize) + { + throw new InvalidDataException(); + } + + byte[] inBuffer = new byte[len]; + int dataread = 0; + while (len - dataread > 0 && (!cancellationToken.IsCancellationRequested)) + { + Task op = _ioStream.ReadAsync(inBuffer, dataread, len - dataread, cancellationToken); + int newData = 0; + newData = await op.WithCancellation(cancellationToken); + dataread += newData; + if (0 == newData) + { + await Task.Delay(100, cancellationToken); + } + } + + return streamEncoding.GetString(inBuffer); + } + + public async Task WriteStringAsync(string outString, CancellationToken cancellationToken) + { + byte[] outBuffer = streamEncoding.GetBytes(outString); + Int32 len = outBuffer.Length; + if (len > MaxStringSize) + { + throw new ArgumentOutOfRangeException(); + } + + await WriteInt32Async(len, cancellationToken); + cancellationToken.ThrowIfCancellationRequested(); + Task op = _ioStream.WriteAsync(outBuffer, 0, len, cancellationToken); + await op.WithCancellation(cancellationToken); + op = _ioStream.FlushAsync(cancellationToken); + await op.WithCancellation(cancellationToken); + } + } +} diff --git a/src/Runner.Common/Terminal.cs b/src/Runner.Common/Terminal.cs new file mode 100644 index 00000000000..f35a2220d81 --- /dev/null +++ b/src/Runner.Common/Terminal.cs @@ -0,0 +1,198 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.Runner.Common +{ + // + // Abstracts away interactions with the terminal which allows: + // (1) Console writes also go to trace for better context in the trace + // (2) Reroute in tests + // + [ServiceLocator(Default = typeof(Terminal))] + public interface ITerminal : IRunnerService, IDisposable + { + event EventHandler CancelKeyPress; + + bool Silent { get; set; } + string ReadLine(); + string ReadSecret(); + void Write(string message, ConsoleColor? colorCode = null); + void WriteLine(); + void WriteLine(string line, ConsoleColor? colorCode = null); + void WriteError(Exception ex); + void WriteError(string line); + void WriteSection(string message); + void WriteSuccessMessage(string message); + } + + public sealed class Terminal : RunnerService, ITerminal + { + public bool Silent { get; set; } + + public event EventHandler CancelKeyPress; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + Console.CancelKeyPress += Console_CancelKeyPress; + } + + private void Console_CancelKeyPress(object sender, ConsoleCancelEventArgs e) + { + e.Cancel = true; + CancelKeyPress?.Invoke(this, e); + } + + public string ReadLine() + { + // Read and trace the value. + Trace.Info("READ LINE"); + string value = Console.ReadLine(); + Trace.Info($"Read value: '{value}'"); + return value; + } + + // TODO: Consider using SecureString. + public string ReadSecret() + { + Trace.Info("READ SECRET"); + var chars = new List(); + while (true) + { + ConsoleKeyInfo key = Console.ReadKey(intercept: true); + if (key.Key == ConsoleKey.Enter) + { + Console.WriteLine(); + break; + } + else if (key.Key == ConsoleKey.Backspace) + { + if (chars.Count > 0) + { + chars.RemoveAt(chars.Count - 1); + Console.Write("\b \b"); + } + } + else if (key.KeyChar > 0) + { + chars.Add(key.KeyChar); + Console.Write("*"); + } + } + + // Trace whether a value was entered. 
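+ // Register the captured value with the secret masker before tracing so the raw secret never appears in the logs.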
+ string val = new String(chars.ToArray()); + if (!string.IsNullOrEmpty(val)) + { + HostContext.SecretMasker.AddValue(val); + } + + Trace.Info($"Read value: '{val}'"); + return val; + } + + public void Write(string message, ConsoleColor? colorCode = null) + { + Trace.Info($"WRITE: {message}"); + if (!Silent) + { + if(colorCode != null) + { + Console.ForegroundColor = colorCode.Value; + Console.Write(message); + Console.ResetColor(); + } + else { + Console.Write(message); + } + } + } + + public void WriteLine() + { + WriteLine(string.Empty); + } + + // Do not add a format string overload. Terminal messages are user facing and therefore + // should be localized. Use the Loc method in the StringUtil class. + public void WriteLine(string line, ConsoleColor? colorCode = null) + { + Trace.Info($"WRITE LINE: {line}"); + if (!Silent) + { + if(colorCode != null) + { + Console.ForegroundColor = colorCode.Value; + Console.WriteLine(line); + Console.ResetColor(); + } + else { + Console.WriteLine(line); + } + } + } + + public void WriteError(Exception ex) + { + Trace.Error("WRITE ERROR (exception):"); + Trace.Error(ex); + if (!Silent) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.Error.WriteLine(ex.Message); + Console.ResetColor(); + } + } + + // Do not add a format string overload. Terminal messages are user facing and therefore + // should be localized. Use the Loc method in the StringUtil class. + public void WriteError(string line) + { + Trace.Error($"WRITE ERROR: {line}"); + if (!Silent) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.Error.WriteLine(line); + Console.ResetColor(); + } + } + + public void WriteSection(string message) + { + if (!Silent) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.White; + Console.WriteLine($"# {message}"); + Console.ResetColor(); + Console.WriteLine(); + } + } + + public void WriteSuccessMessage(string message) + { + if (!Silent) + { + Console.ForegroundColor = ConsoleColor.Green; + Console.Write("√ "); + Console.ForegroundColor = ConsoleColor.White; + Console.WriteLine(message); + Console.ResetColor(); + } + } + + private void Dispose(bool disposing) + { + if (disposing) + { + Console.CancelKeyPress -= Console_CancelKeyPress; + } + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + } +} diff --git a/src/Runner.Common/ThrottlingReportHandler.cs b/src/Runner.Common/ThrottlingReportHandler.cs new file mode 100644 index 00000000000..969df606f3a --- /dev/null +++ b/src/Runner.Common/ThrottlingReportHandler.cs @@ -0,0 +1,65 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using GitHub.Services.Common.Internal; + +namespace GitHub.Runner.Common +{ + public class ThrottlingEventArgs : EventArgs + { + public ThrottlingEventArgs(TimeSpan delay, DateTime expiration) + { + Delay = delay; + Expiration = expiration; + } + + public TimeSpan Delay { get; private set; } + public DateTime Expiration { get; private set; } + } + + public interface IThrottlingReporter + { + void ReportThrottling(TimeSpan delay, DateTime expiration); + } + + public class ThrottlingReportHandler : DelegatingHandler + { + private IThrottlingReporter _throttlingReporter; + + public ThrottlingReportHandler(IThrottlingReporter throttlingReporter) + : base() + { + ArgUtil.NotNull(throttlingReporter, nameof(throttlingReporter)); + _throttlingReporter = 
throttlingReporter; + } + + protected async override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + // Call the inner handler. + var response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false); + + // Inspect whether response has throttling information + IEnumerable vssRequestDelayed = null; + IEnumerable vssRequestQuotaReset = null; + + if (response.Headers.TryGetValues(HttpHeaders.VssRateLimitDelay, out vssRequestDelayed) && + response.Headers.TryGetValues(HttpHeaders.VssRateLimitReset, out vssRequestQuotaReset) && + !string.IsNullOrEmpty(vssRequestDelayed.FirstOrDefault()) && + !string.IsNullOrEmpty(vssRequestQuotaReset.FirstOrDefault())) + { + TimeSpan delay = TimeSpan.FromSeconds(double.Parse(vssRequestDelayed.First())); + int expirationEpoch = int.Parse(vssRequestQuotaReset.First()); + DateTime expiration = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc).AddSeconds(expirationEpoch); + + _throttlingReporter.ReportThrottling(delay, expiration); + } + + return response; + } + } +} diff --git a/src/Runner.Common/TraceManager.cs b/src/Runner.Common/TraceManager.cs new file mode 100644 index 00000000000..2500eb82a56 --- /dev/null +++ b/src/Runner.Common/TraceManager.cs @@ -0,0 +1,88 @@ +using GitHub.Runner.Common.Util; +using System; +using System.Collections.Concurrent; +using System.Diagnostics; +using GitHub.DistributedTask.Logging; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common +{ + public interface ITraceManager : IDisposable + { + SourceSwitch Switch { get; } + Tracing this[string name] { get; } + } + + public sealed class TraceManager : ITraceManager + { + private readonly ConcurrentDictionary _sources = new ConcurrentDictionary(StringComparer.OrdinalIgnoreCase); + private readonly HostTraceListener _hostTraceListener; + private TraceSetting _traceSetting; + private ISecretMasker _secretMasker; + + public TraceManager(HostTraceListener traceListener, ISecretMasker secretMasker) + : this(traceListener, new TraceSetting(), secretMasker) + { + } + + public TraceManager(HostTraceListener traceListener, TraceSetting traceSetting, ISecretMasker secretMasker) + { + // Validate and store params. 
+ ArgUtil.NotNull(traceListener, nameof(traceListener)); + ArgUtil.NotNull(traceSetting, nameof(traceSetting)); + ArgUtil.NotNull(secretMasker, nameof(secretMasker)); + _hostTraceListener = traceListener; + _traceSetting = traceSetting; + _secretMasker = secretMasker; + + Switch = new SourceSwitch("GitHubActionsRunnerSwitch") + { + Level = _traceSetting.DefaultTraceLevel.ToSourceLevels() + }; + } + + public SourceSwitch Switch { get; private set; } + + public Tracing this[string name] + { + get + { + return _sources.GetOrAdd(name, key => CreateTraceSource(key)); + } + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + foreach (Tracing traceSource in _sources.Values) + { + traceSource.Dispose(); + } + + _sources.Clear(); + } + } + + private Tracing CreateTraceSource(string name) + { + SourceSwitch sourceSwitch = Switch; + + TraceLevel sourceTraceLevel; + if (_traceSetting.DetailTraceSetting.TryGetValue(name, out sourceTraceLevel)) + { + sourceSwitch = new SourceSwitch("GitHubActionsRunnerSubSwitch") + { + Level = sourceTraceLevel.ToSourceLevels() + }; + } + return new Tracing(name, _secretMasker, sourceSwitch, _hostTraceListener); + } + } +} diff --git a/src/Runner.Common/TraceSetting.cs b/src/Runner.Common/TraceSetting.cs new file mode 100644 index 00000000000..786a27210e8 --- /dev/null +++ b/src/Runner.Common/TraceSetting.cs @@ -0,0 +1,92 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Runtime.Serialization; + +namespace GitHub.Runner.Common +{ + [DataContract] + public class TraceSetting + { + public TraceSetting() + { + DefaultTraceLevel = TraceLevel.Info; +#if DEBUG + DefaultTraceLevel = TraceLevel.Verbose; +#endif + string actionsRunnerTrace = Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TRACE"); + if (!string.IsNullOrEmpty(actionsRunnerTrace)) + { + DefaultTraceLevel = TraceLevel.Verbose; + } + } + + [DataMember(EmitDefaultValue = false)] + public TraceLevel DefaultTraceLevel + { + get; + set; + } + + public Dictionary DetailTraceSetting + { + get + { + if (m_detailTraceSetting == null) + { + m_detailTraceSetting = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_detailTraceSetting; + } + } + + [DataMember(EmitDefaultValue = false, Name = "DetailTraceSetting")] + private Dictionary m_detailTraceSetting; + } + + [DataContract] + public enum TraceLevel + { + [EnumMember] + Off = 0, + + [EnumMember] + Critical = 1, + + [EnumMember] + Error = 2, + + [EnumMember] + Warning = 3, + + [EnumMember] + Info = 4, + + [EnumMember] + Verbose = 5, + } + + public static class TraceLevelExtensions + { + public static SourceLevels ToSourceLevels(this TraceLevel traceLevel) + { + switch (traceLevel) + { + case TraceLevel.Off: + return SourceLevels.Off; + case TraceLevel.Critical: + return SourceLevels.Critical; + case TraceLevel.Error: + return SourceLevels.Error; + case TraceLevel.Warning: + return SourceLevels.Warning; + case TraceLevel.Info: + return SourceLevels.Information; + case TraceLevel.Verbose: + return SourceLevels.Verbose; + default: + return SourceLevels.Information; + } + } + } +} diff --git a/src/Runner.Common/Tracing.cs b/src/Runner.Common/Tracing.cs new file mode 100644 index 00000000000..713ed1dc08e --- /dev/null +++ b/src/Runner.Common/Tracing.cs @@ -0,0 +1,128 @@ + +using GitHub.Runner.Common.Util; +using Newtonsoft.Json; +using System; +using System.Diagnostics; +using System.Runtime.CompilerServices; +using 
GitHub.DistributedTask.Logging; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common +{ + public sealed class Tracing : ITraceWriter, IDisposable + { + private ISecretMasker _secretMasker; + private TraceSource _traceSource; + + public Tracing(string name, ISecretMasker secretMasker, SourceSwitch sourceSwitch, HostTraceListener traceListener) + { + ArgUtil.NotNull(secretMasker, nameof(secretMasker)); + _secretMasker = secretMasker; + _traceSource = new TraceSource(name); + _traceSource.Switch = sourceSwitch; + + // Remove the default trace listener. + if (_traceSource.Listeners.Count > 0 && + _traceSource.Listeners[0] is DefaultTraceListener) + { + _traceSource.Listeners.RemoveAt(0); + } + + _traceSource.Listeners.Add(traceListener); + } + + public void Info(string message) + { + Trace(TraceEventType.Information, message); + } + + public void Info(string format, params object[] args) + { + Trace(TraceEventType.Information, StringUtil.Format(format, args)); + } + + public void Info(object item) + { + string json = JsonConvert.SerializeObject(item, Formatting.Indented); + Trace(TraceEventType.Information, json); + } + + public void Error(Exception exception) + { + Trace(TraceEventType.Error, exception.ToString()); + } + + // Do not remove the non-format overload. + public void Error(string message) + { + Trace(TraceEventType.Error, message); + } + + public void Error(string format, params object[] args) + { + Trace(TraceEventType.Error, StringUtil.Format(format, args)); + } + + // Do not remove the non-format overload. + public void Warning(string message) + { + Trace(TraceEventType.Warning, message); + } + + public void Warning(string format, params object[] args) + { + Trace(TraceEventType.Warning, StringUtil.Format(format, args)); + } + + // Do not remove the non-format overload. + public void Verbose(string message) + { + Trace(TraceEventType.Verbose, message); + } + + public void Verbose(string format, params object[] args) + { + Trace(TraceEventType.Verbose, StringUtil.Format(format, args)); + } + + public void Verbose(object item) + { + string json = JsonConvert.SerializeObject(item, Formatting.Indented); + Trace(TraceEventType.Verbose, json); + } + + public void Entering([CallerMemberName] string name = "") + { + Trace(TraceEventType.Verbose, $"Entering {name}"); + } + + public void Leaving([CallerMemberName] string name = "") + { + Trace(TraceEventType.Verbose, $"Leaving {name}"); + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Trace(TraceEventType eventType, string message) + { + ArgUtil.NotNull(_traceSource, nameof(_traceSource)); + _traceSource.TraceEvent( + eventType: eventType, + id: 0, + message: _secretMasker.MaskSecrets(message)); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + _traceSource.Flush(); + _traceSource.Close(); + } + } + } +} diff --git a/src/Runner.Common/Util/EnumUtil.cs b/src/Runner.Common/Util/EnumUtil.cs new file mode 100644 index 00000000000..468ac8a0d2d --- /dev/null +++ b/src/Runner.Common/Util/EnumUtil.cs @@ -0,0 +1,18 @@ +namespace GitHub.Runner.Common.Util +{ + using System; + + public static class EnumUtil + { + public static T? TryParse(string value) where T: struct + { + T val; + if (Enum.TryParse(value ?? 
string.Empty, ignoreCase: true, result: out val)) + { + return val; + } + + return null; + } + } +} diff --git a/src/Runner.Common/Util/PlanUtil.cs b/src/Runner.Common/Util/PlanUtil.cs new file mode 100644 index 00000000000..b5480ac8b4b --- /dev/null +++ b/src/Runner.Common/Util/PlanUtil.cs @@ -0,0 +1,28 @@ +using System; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common.Util +{ + public static class PlanUtil + { + public static PlanFeatures GetFeatures(TaskOrchestrationPlanReference plan) + { + ArgUtil.NotNull(plan, nameof(plan)); + PlanFeatures features = PlanFeatures.None; + if (plan.Version >= 8) + { + features |= PlanFeatures.JobCompletedPlanEvent; + } + + return features; + } + } + + [Flags] + public enum PlanFeatures + { + None = 0, + JobCompletedPlanEvent = 1, + } +} diff --git a/src/Runner.Common/Util/TaskResultUtil.cs b/src/Runner.Common/Util/TaskResultUtil.cs new file mode 100644 index 00000000000..e82bb2896b9 --- /dev/null +++ b/src/Runner.Common/Util/TaskResultUtil.cs @@ -0,0 +1,79 @@ +using GitHub.DistributedTask.WebApi; +using System; + +namespace GitHub.Runner.Common.Util +{ + public static class TaskResultUtil + { + private static readonly int _returnCodeOffset = 100; + + public static bool IsValidReturnCode(int returnCode) + { + int resultInt = returnCode - _returnCodeOffset; + return Enum.IsDefined(typeof(TaskResult), resultInt); + } + + public static int TranslateToReturnCode(TaskResult result) + { + return _returnCodeOffset + (int)result; + } + + public static TaskResult TranslateFromReturnCode(int returnCode) + { + int resultInt = returnCode - _returnCodeOffset; + if (Enum.IsDefined(typeof(TaskResult), resultInt)) + { + return (TaskResult)resultInt; + } + else + { + return TaskResult.Failed; + } + } + + // Merge 2 TaskResults get the worst result. + // Succeeded -> Failed/Canceled/Skipped/Abandoned + // Failed -> Failed/Canceled + // Canceled -> Canceled + // Skipped -> Skipped + // Abandoned -> Abandoned + public static TaskResult MergeTaskResults(TaskResult? 
currentResult, TaskResult comingResult) + { + if (currentResult == null) + { + return comingResult; + } + + // current result is Canceled/Skip/Abandoned + if (currentResult > TaskResult.Failed) + { + return currentResult.Value; + } + + // comming result is bad than current result + if (comingResult >= currentResult) + { + return comingResult; + } + + return currentResult.Value; + } + + public static ActionResult ToActionResult(this TaskResult result) + { + switch (result) + { + case TaskResult.Succeeded: + return ActionResult.Success; + case TaskResult.Failed: + return ActionResult.Failure; + case TaskResult.Canceled: + return ActionResult.Cancelled; + case TaskResult.Skipped: + return ActionResult.Skipped; + default: + throw new NotSupportedException(result.ToString()); + } + } + } +} diff --git a/src/Runner.Common/Util/UnixUtil.cs b/src/Runner.Common/Util/UnixUtil.cs new file mode 100644 index 00000000000..3500b185648 --- /dev/null +++ b/src/Runner.Common/Util/UnixUtil.cs @@ -0,0 +1,79 @@ +using System; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common.Util +{ + [ServiceLocator(Default = typeof(UnixUtil))] + public interface IUnixUtil : IRunnerService + { + Task ExecAsync(string workingDirectory, string toolName, string argLine); + Task ChmodAsync(string mode, string file); + Task ChownAsync(string owner, string group, string file); + } + + public sealed class UnixUtil : RunnerService, IUnixUtil + { + private ITerminal _term; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + _term = hostContext.GetService(); + } + + public async Task ChmodAsync(string mode, string file) + { + Trace.Entering(); + await ExecAsync(HostContext.GetDirectory(WellKnownDirectory.Root), "chmod", $"{mode} \"{file}\""); + } + + public async Task ChownAsync(string owner, string group, string file) + { + Trace.Entering(); + await ExecAsync(HostContext.GetDirectory(WellKnownDirectory.Root), "chown", $"{owner}:{group} \"{file}\""); + } + + public async Task ExecAsync(string workingDirectory, string toolName, string argLine) + { + Trace.Entering(); + + string toolPath = WhichUtil.Which(toolName, trace: Trace); + Trace.Info($"Running {toolPath} {argLine}"); + + var processInvoker = HostContext.CreateService(); + processInvoker.OutputDataReceived += OnOutputDataReceived; + processInvoker.ErrorDataReceived += OnErrorDataReceived; + + try + { + using (var cs = new CancellationTokenSource(TimeSpan.FromSeconds(45))) + { + await processInvoker.ExecuteAsync(workingDirectory, toolPath, argLine, null, true, cs.Token); + } + } + finally + { + processInvoker.OutputDataReceived -= OnOutputDataReceived; + processInvoker.ErrorDataReceived -= OnErrorDataReceived; + } + } + + private void OnOutputDataReceived(object sender, ProcessDataReceivedEventArgs e) + { + if (!string.IsNullOrEmpty(e.Data)) + { + _term.WriteLine(e.Data); + } + } + + private void OnErrorDataReceived(object sender, ProcessDataReceivedEventArgs e) + { + if (!string.IsNullOrEmpty(e.Data)) + { + _term.WriteLine(e.Data); + } + } + } +} diff --git a/src/Runner.Common/Util/VarUtil.cs b/src/Runner.Common/Util/VarUtil.cs new file mode 100644 index 00000000000..81b8ecb2308 --- /dev/null +++ b/src/Runner.Common/Util/VarUtil.cs @@ -0,0 +1,63 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common.Util +{ + public static class VarUtil + { + public static StringComparer 
EnvironmentVariableKeyComparer + { + get + { + switch (Constants.Runner.Platform) + { + case Constants.OSPlatform.Linux: + case Constants.OSPlatform.OSX: + return StringComparer.Ordinal; + case Constants.OSPlatform.Windows: + return StringComparer.OrdinalIgnoreCase; + default: + throw new NotSupportedException(); // Should never reach here. + } + } + } + + public static string OS + { + get + { + switch (Constants.Runner.Platform) + { + case Constants.OSPlatform.Linux: + return "Linux"; + case Constants.OSPlatform.OSX: + return "macOS"; + case Constants.OSPlatform.Windows: + return "Windows"; + default: + throw new NotSupportedException(); // Should never reach here. + } + } + } + + public static string OSArchitecture + { + get + { + switch (Constants.Runner.PlatformArchitecture) + { + case Constants.Architecture.X86: + return "X86"; + case Constants.Architecture.X64: + return "X64"; + case Constants.Architecture.Arm: + return "ARM"; + default: + throw new NotSupportedException(); // Should never reach here. + } + } + } + } +} diff --git a/src/Runner.Listener/Agent.cs b/src/Runner.Listener/Agent.cs new file mode 100644 index 00000000000..1a0a140b82d --- /dev/null +++ b/src/Runner.Listener/Agent.cs @@ -0,0 +1,493 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Listener.Configuration; +using GitHub.Runner.Common.Util; +using System; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.WebApi; +using Pipelines = GitHub.DistributedTask.Pipelines; +using System.IO; +using System.Reflection; +using System.Runtime.CompilerServices; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Listener +{ + [ServiceLocator(Default = typeof(Runner))] + public interface IRunner : IRunnerService + { + Task ExecuteCommand(CommandSettings command); + } + + public sealed class Runner : RunnerService, IRunner + { + private IMessageListener _listener; + private ITerminal _term; + private bool _inConfigStage; + private ManualResetEvent _completedCommand = new ManualResetEvent(false); + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + _term = HostContext.GetService(); + } + + public async Task ExecuteCommand(CommandSettings command) + { + try + { + var runnerWebProxy = HostContext.GetService(); + var runnerCertManager = HostContext.GetService(); + VssUtil.InitializeVssClientSettings(HostContext.UserAgent, runnerWebProxy.WebProxy, runnerCertManager.VssClientCertificateManager); + + _inConfigStage = true; + _completedCommand.Reset(); + _term.CancelKeyPress += CtrlCHandler; + + //register a SIGTERM handler + HostContext.Unloading += Runner_Unloading; + + // TODO Unit test to cover this logic + Trace.Info(nameof(ExecuteCommand)); + var configManager = HostContext.GetService(); + + // command is not required, if no command it just starts if configured + + // TODO: Invalid config prints usage + + if (command.Help) + { + PrintUsage(command); + return Constants.Runner.ReturnCode.Success; + } + + if (command.Version) + { + _term.WriteLine(BuildConstants.RunnerPackage.Version); + return Constants.Runner.ReturnCode.Success; + } + + if (command.Commit) + { + _term.WriteLine(BuildConstants.Source.CommitHash); + return Constants.Runner.ReturnCode.Success; + } + + // Configure runner prompt for args if not supplied + // Unattended configure mode will not prompt for args if not supplied and error on any missing or invalid value. 
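+ // Configuration and removal errors are reported to the terminal and surface as TerminatedError rather than escaping as exceptions.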
+ if (command.Configure) + { + try + { + await configManager.ConfigureAsync(command); + return Constants.Runner.ReturnCode.Success; + } + catch (Exception ex) + { + Trace.Error(ex); + _term.WriteError(ex.Message); + return Constants.Runner.ReturnCode.TerminatedError; + } + } + + // Remove config files, remove the service, and exit + if (command.Remove) + { + try + { + await configManager.UnconfigureAsync(command); + return Constants.Runner.ReturnCode.Success; + } + catch (Exception ex) + { + Trace.Error(ex); + _term.WriteError(ex.Message); + return Constants.Runner.ReturnCode.TerminatedError; + } + } + + _inConfigStage = false; + + // Warm up the runner process (JIT/CLR). + // In scenarios where the runner is single use (used and then thrown away), the system provisioning the runner can call `Runner.Listener --warmup` before the machine is made available to the pool for use. + // This optimizes the runner process startup time. + if (command.Warmup) + { + var binDir = HostContext.GetDirectory(WellKnownDirectory.Bin); + foreach (var assemblyFile in Directory.EnumerateFiles(binDir, "*.dll")) + { + try + { + Trace.Info($"Load assembly: {assemblyFile}."); + var assembly = Assembly.LoadFrom(assemblyFile); + var types = assembly.GetTypes(); + foreach (Type loadedType in types) + { + try + { + Trace.Info($"Load methods: {loadedType.FullName}."); + var methods = loadedType.GetMethods(BindingFlags.DeclaredOnly | BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static); + foreach (var method in methods) + { + if (!method.IsAbstract && !method.ContainsGenericParameters) + { + Trace.Verbose($"Prepare method: {method.Name}."); + RuntimeHelpers.PrepareMethod(method.MethodHandle); + } + } + } + catch (Exception ex) + { + Trace.Error(ex); + } + } + } + catch (Exception ex) + { + Trace.Error(ex); + } + } + + return Constants.Runner.ReturnCode.Success; + } + + RunnerSettings settings = configManager.LoadSettings(); + + var store = HostContext.GetService(); + bool configuredAsService = store.IsServiceConfigured(); + + // Run runner + if (command.Run) // this line currently breaks the machine provisioner. + { + // Error if runner not configured. + if (!configManager.IsConfigured()) + { + _term.WriteError("Runner is not configured."); + PrintUsage(command); + return Constants.Runner.ReturnCode.TerminatedError; + } + + Trace.Verbose($"Configured as service: '{configuredAsService}'"); + + // Get the startup type of the runner, i.e. autostartup, service, or manual + StartupType startType; + var startupTypeAsString = command.GetStartupType(); + if (string.IsNullOrEmpty(startupTypeAsString) && configuredAsService) + { + // We need to try our best to make the startup type accurate. + // The problem comes from runner auto-update, which can leave an old version service host binary alongside a newer version runner binary. + // In that case the service host won't pass --startuptype to Runner.Listener even though the runner is actually running as a service. + // We only guess the startup type when the runner is configured as a service, and the guess is based on whether STDOUT/STDERR/STDIN have been redirected. + Trace.Info($"Try to determine runner startup type based on console redirects."); + startType = (Console.IsErrorRedirected && Console.IsInputRedirected && Console.IsOutputRedirected) ? StartupType.Service : StartupType.Manual; + } + else + { + if (!Enum.TryParse(startupTypeAsString, true, out startType)) + { + Trace.Info($"Could not parse the argument value '{startupTypeAsString}' for StartupType.
Defaulting to {StartupType.Manual}"); + startType = StartupType.Manual; + } + } + +#if !OS_WINDOWS + // Fix the work folder setting on Linux + if (settings.WorkFolder.Contains("vsts", StringComparison.OrdinalIgnoreCase)) + { + var workFolder = "/runner/work"; + var unix = HostContext.GetService(); + + // create new work folder /runner/work + await unix.ExecAsync(HostContext.GetDirectory(WellKnownDirectory.Root), "sh", $"-c \"sudo mkdir -p {workFolder}\""); + + // fix permission + await unix.ExecAsync(HostContext.GetDirectory(WellKnownDirectory.Root), "sh", $"-c \"sudo chown -R $USER {workFolder}\""); + + // update settings + settings.WorkFolder = workFolder; + store.SaveSettings(settings); + } +#endif + + Trace.Info($"Set runner startup type - {startType}"); + HostContext.StartupType = startType; + + // Run the runner interactively or as service + return await RunAsync(settings, command.RunOnce); + } + else + { + PrintUsage(command); + return Constants.Runner.ReturnCode.Success; + } + } + finally + { + _term.CancelKeyPress -= CtrlCHandler; + HostContext.Unloading -= Runner_Unloading; + _completedCommand.Set(); + } + } + + private void Runner_Unloading(object sender, EventArgs e) + { + if ((!_inConfigStage) && (!HostContext.RunnerShutdownToken.IsCancellationRequested)) + { + HostContext.ShutdownRunner(ShutdownReason.UserCancelled); + _completedCommand.WaitOne(Constants.Runner.ExitOnUnloadTimeout); + } + } + + private void CtrlCHandler(object sender, EventArgs e) + { + _term.WriteLine("Exiting..."); + if (_inConfigStage) + { + HostContext.Dispose(); + Environment.Exit(Constants.Runner.ReturnCode.TerminatedError); + } + else + { + ConsoleCancelEventArgs cancelEvent = e as ConsoleCancelEventArgs; + if (cancelEvent != null && HostContext.GetService().IsServiceConfigured()) + { + ShutdownReason reason; + if (cancelEvent.SpecialKey == ConsoleSpecialKey.ControlBreak) + { + Trace.Info("Received Ctrl-Break signal from runner service host, this indicate the operating system is shutting down."); + reason = ShutdownReason.OperatingSystemShutdown; + } + else + { + Trace.Info("Received Ctrl-C signal, stop Runner.Listener and Runner.Worker."); + reason = ShutdownReason.UserCancelled; + } + + HostContext.ShutdownRunner(reason); + } + else + { + HostContext.ShutdownRunner(ShutdownReason.UserCancelled); + } + } + } + + //create worker manager, create message listener and start listening to the queue + private async Task RunAsync(RunnerSettings settings, bool runOnce = false) + { + try + { + Trace.Info(nameof(RunAsync)); + _listener = HostContext.GetService(); + if (!await _listener.CreateSessionAsync(HostContext.RunnerShutdownToken)) + { + return Constants.Runner.ReturnCode.TerminatedError; + } + + HostContext.WritePerfCounter("SessionCreated"); + _term.WriteLine($"{DateTime.UtcNow:u}: Listening for Jobs"); + + IJobDispatcher jobDispatcher = null; + CancellationTokenSource messageQueueLoopTokenSource = CancellationTokenSource.CreateLinkedTokenSource(HostContext.RunnerShutdownToken); + try + { + var notification = HostContext.GetService(); + if (!String.IsNullOrEmpty(settings.NotificationSocketAddress)) + { + notification.StartClient(settings.NotificationSocketAddress, settings.MonitorSocketAddress); + } + else + { + notification.StartClient(settings.NotificationPipeName, settings.MonitorSocketAddress, HostContext.RunnerShutdownToken); + } + + bool autoUpdateInProgress = false; + Task selfUpdateTask = null; + bool runOnceJobReceived = false; + jobDispatcher = HostContext.CreateService(); + + while 
(!HostContext.RunnerShutdownToken.IsCancellationRequested) + { + TaskAgentMessage message = null; + bool skipMessageDeletion = false; + try + { + Task getNextMessage = _listener.GetNextMessageAsync(messageQueueLoopTokenSource.Token); + if (autoUpdateInProgress) + { + Trace.Verbose("Auto update task is running in the background, waiting for getNextMessage or selfUpdateTask to finish."); + Task completeTask = await Task.WhenAny(getNextMessage, selfUpdateTask); + if (completeTask == selfUpdateTask) + { + autoUpdateInProgress = false; + if (await selfUpdateTask) + { + Trace.Info("Auto update task finished in the background, a runner update is ready to apply; exit the current runner instance."); + Trace.Info("Stop the message queue loop."); + messageQueueLoopTokenSource.Cancel(); + try + { + await getNextMessage; + } + catch (Exception ex) + { + Trace.Info($"Ignore any exception after canceling the message loop. {ex}"); + } + + if (runOnce) + { + return Constants.Runner.ReturnCode.RunOnceRunnerUpdating; + } + else + { + return Constants.Runner.ReturnCode.RunnerUpdating; + } + } + else + { + Trace.Info("Auto update task finished in the background, there is no runner update available to apply; continue the message queue loop."); + } + } + } + + if (runOnceJobReceived) + { + Trace.Verbose("One-time-use runner has started running its job, waiting for getNextMessage or the job to finish."); + Task completeTask = await Task.WhenAny(getNextMessage, jobDispatcher.RunOnceJobCompleted.Task); + if (completeTask == jobDispatcher.RunOnceJobCompleted.Task) + { + Trace.Info("Job has finished in the background; the runner will exit since it is running in one-time-use mode."); + Trace.Info("Stop the message queue loop."); + messageQueueLoopTokenSource.Cancel(); + try + { + await getNextMessage; + } + catch (Exception ex) + { + Trace.Info($"Ignore any exception after canceling the message loop.
{ex}"); + } + + return Constants.Runner.ReturnCode.Success; + } + } + + message = await getNextMessage; //get next message + HostContext.WritePerfCounter($"MessageReceived_{message.MessageType}"); + if (string.Equals(message.MessageType, AgentRefreshMessage.MessageType, StringComparison.OrdinalIgnoreCase)) + { + if (autoUpdateInProgress == false) + { + autoUpdateInProgress = true; + var runnerUpdateMessage = JsonUtility.FromString(message.Body); + var selfUpdater = HostContext.GetService(); + selfUpdateTask = selfUpdater.SelfUpdate(runnerUpdateMessage, jobDispatcher, !runOnce && HostContext.StartupType != StartupType.Service, HostContext.RunnerShutdownToken); + Trace.Info("Refresh message received, kick-off selfupdate background process."); + } + else + { + Trace.Info("Refresh message received, skip autoupdate since a previous autoupdate is already running."); + } + } + else if (string.Equals(message.MessageType, JobRequestMessageTypes.PipelineAgentJobRequest, StringComparison.OrdinalIgnoreCase)) + { + if (autoUpdateInProgress || runOnceJobReceived) + { + skipMessageDeletion = true; + Trace.Info($"Skip message deletion for job request message '{message.MessageId}'."); + } + else + { + var jobMessage = StringUtil.ConvertFromJson(message.Body); + jobDispatcher.Run(jobMessage, runOnce); + if (runOnce) + { + Trace.Info("One time used runner received job message."); + runOnceJobReceived = true; + } + } + } + else if (string.Equals(message.MessageType, JobCancelMessage.MessageType, StringComparison.OrdinalIgnoreCase)) + { + var cancelJobMessage = JsonUtility.FromString(message.Body); + bool jobCancelled = jobDispatcher.Cancel(cancelJobMessage); + skipMessageDeletion = (autoUpdateInProgress || runOnceJobReceived) && !jobCancelled; + + if (skipMessageDeletion) + { + Trace.Info($"Skip message deletion for cancellation message '{message.MessageId}'."); + } + } + else + { + Trace.Error($"Received message {message.MessageId} with unsupported message type {message.MessageType}."); + } + } + finally + { + if (!skipMessageDeletion && message != null) + { + try + { + await _listener.DeleteMessageAsync(message); + } + catch (Exception ex) + { + Trace.Error($"Catch exception during delete message from message queue. message id: {message.MessageId}"); + Trace.Error(ex); + } + finally + { + message = null; + } + } + } + } + } + finally + { + if (jobDispatcher != null) + { + await jobDispatcher.ShutdownAsync(); + } + + //TODO: make sure we don't mask more important exception + await _listener.DeleteSessionAsync(); + + messageQueueLoopTokenSource.Dispose(); + } + } + catch (TaskAgentAccessTokenExpiredException) + { + Trace.Info("Agent OAuth token has been revoked. Shutting down."); + } + + return Constants.Runner.ReturnCode.Success; + } + + private void PrintUsage(CommandSettings command) + { + string separator; + string ext; +#if OS_WINDOWS + separator = "\\"; + ext = "cmd"; +#else + separator = "/"; + ext = "sh"; +#endif + _term.WriteLine($@" +Commands:, + .{separator}config.{ext} Configures the runner + .{separator}config.{ext} remove Unconfigures the runner + .{separator}run.{ext} Runs the runner interactively. Does not require any options. 
+ +Options: + --version Prints the runner version + --commit Prints the runner commit + --help Prints the help for each command +"); + } + } +} diff --git a/src/Runner.Listener/CommandSettings.cs b/src/Runner.Listener/CommandSettings.cs new file mode 100644 index 00000000000..01ac2600366 --- /dev/null +++ b/src/Runner.Listener/CommandSettings.cs @@ -0,0 +1,467 @@ +using GitHub.Runner.Listener.Configuration; +using GitHub.Runner.Common.Util; +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using GitHub.DistributedTask.Logging; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Listener +{ + public sealed class CommandSettings + { + private readonly Dictionary _envArgs = new Dictionary(StringComparer.OrdinalIgnoreCase); + private readonly CommandLineParser _parser; + private readonly IPromptManager _promptManager; + private readonly Tracing _trace; + + private readonly string[] validCommands = + { + Constants.Runner.CommandLine.Commands.Configure, + Constants.Runner.CommandLine.Commands.Remove, + Constants.Runner.CommandLine.Commands.Run, + Constants.Runner.CommandLine.Commands.Warmup, + }; + + private readonly string[] validFlags = + { + Constants.Runner.CommandLine.Flags.Commit, +#if OS_WINDOWS + Constants.Runner.CommandLine.Flags.GitUseSChannel, +#endif + Constants.Runner.CommandLine.Flags.Help, + Constants.Runner.CommandLine.Flags.Replace, + Constants.Runner.CommandLine.Flags.RunAsService, + Constants.Runner.CommandLine.Flags.Once, + Constants.Runner.CommandLine.Flags.SslSkipCertValidation, + Constants.Runner.CommandLine.Flags.Unattended, + Constants.Runner.CommandLine.Flags.Version + }; + + private readonly string[] validArgs = + { + Constants.Runner.CommandLine.Args.Agent, + Constants.Runner.CommandLine.Args.Auth, + Constants.Runner.CommandLine.Args.MonitorSocketAddress, + Constants.Runner.CommandLine.Args.NotificationPipeName, + Constants.Runner.CommandLine.Args.Password, + Constants.Runner.CommandLine.Args.Pool, + Constants.Runner.CommandLine.Args.ProxyPassword, + Constants.Runner.CommandLine.Args.ProxyUrl, + Constants.Runner.CommandLine.Args.ProxyUserName, + Constants.Runner.CommandLine.Args.SslCACert, + Constants.Runner.CommandLine.Args.SslClientCert, + Constants.Runner.CommandLine.Args.SslClientCertKey, + Constants.Runner.CommandLine.Args.SslClientCertArchive, + Constants.Runner.CommandLine.Args.SslClientCertPassword, + Constants.Runner.CommandLine.Args.StartupType, + Constants.Runner.CommandLine.Args.Token, + Constants.Runner.CommandLine.Args.Url, + Constants.Runner.CommandLine.Args.UserName, + Constants.Runner.CommandLine.Args.WindowsLogonAccount, + Constants.Runner.CommandLine.Args.WindowsLogonPassword, + Constants.Runner.CommandLine.Args.Work + }; + + // Commands. + public bool Configure => TestCommand(Constants.Runner.CommandLine.Commands.Configure); + public bool Remove => TestCommand(Constants.Runner.CommandLine.Commands.Remove); + public bool Run => TestCommand(Constants.Runner.CommandLine.Commands.Run); + public bool Warmup => TestCommand(Constants.Runner.CommandLine.Commands.Warmup); + + // Flags. 
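+ // Flags may also be supplied through ACTIONS_RUNNER_INPUT_* environment variables; TestFlag falls back to the stored env args.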
+ public bool Commit => TestFlag(Constants.Runner.CommandLine.Flags.Commit); + public bool Help => TestFlag(Constants.Runner.CommandLine.Flags.Help); + public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended); + public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version); + +#if OS_WINDOWS + public bool GitUseSChannel => TestFlag(Constants.Runner.CommandLine.Flags.GitUseSChannel); +#endif + public bool RunOnce => TestFlag(Constants.Runner.CommandLine.Flags.Once); + + // Constructor. + public CommandSettings(IHostContext context, string[] args) + { + ArgUtil.NotNull(context, nameof(context)); + _promptManager = context.GetService(); + _trace = context.GetTrace(nameof(CommandSettings)); + + // Parse the command line args. + _parser = new CommandLineParser( + hostContext: context, + secretArgNames: Constants.Runner.CommandLine.Args.Secrets); + _parser.Parse(args); + + // Store and remove any args passed via environment variables. + IDictionary environment = Environment.GetEnvironmentVariables(); + string envPrefix = "ACTIONS_RUNNER_INPUT_"; + foreach (DictionaryEntry entry in environment) + { + // Test if starts with ACTIONS_RUNNER_INPUT_. + string fullKey = entry.Key as string ?? string.Empty; + if (fullKey.StartsWith(envPrefix, StringComparison.OrdinalIgnoreCase)) + { + string val = (entry.Value as string ?? string.Empty).Trim(); + if (!string.IsNullOrEmpty(val)) + { + // Extract the name. + string name = fullKey.Substring(envPrefix.Length); + + // Mask secrets. + bool secret = Constants.Runner.CommandLine.Args.Secrets.Any(x => string.Equals(x, name, StringComparison.OrdinalIgnoreCase)); + if (secret) + { + context.SecretMasker.AddValue(val); + } + + // Store the value. + _envArgs[name] = val; + } + + // Remove from the environment block. + _trace.Info($"Removing env var: '{fullKey}'"); + Environment.SetEnvironmentVariable(fullKey, null); + } + } + } + + // Validate commandline parser result + public List Validate() + { + List unknowns = new List(); + + // detect unknown commands + unknowns.AddRange(_parser.Commands.Where(x => !validCommands.Contains(x, StringComparer.OrdinalIgnoreCase))); + + // detect unknown flags + unknowns.AddRange(_parser.Flags.Where(x => !validFlags.Contains(x, StringComparer.OrdinalIgnoreCase))); + + // detect unknown args + unknowns.AddRange(_parser.Args.Keys.Where(x => !validArgs.Contains(x, StringComparer.OrdinalIgnoreCase))); + + return unknowns; + } + + // + // Interactive flags. + // + public bool GetReplace() + { + return TestFlagOrPrompt( + name: Constants.Runner.CommandLine.Flags.Replace, + description: "Would you like to replace the existing runner? (Y/N)", + defaultValue: false); + } + + public bool GetRunAsService() + { + return TestFlagOrPrompt( + name: Constants.Runner.CommandLine.Flags.RunAsService, + description: "Would you like to run the runner as service? (Y/N)", + defaultValue: false); + } + + public bool GetAutoLaunchBrowser() + { + return TestFlagOrPrompt( + name: Constants.Runner.CommandLine.Flags.LaunchBrowser, + description: "Would you like to launch your browser for AAD Device Code Flow? (Y/N)", + defaultValue: true); + } + // + // Args. + // + public string GetAgentName() + { + return GetArgOrPrompt( + name: Constants.Runner.CommandLine.Args.Agent, + description: "Enter the name of runner:", + defaultValue: Environment.MachineName ?? 
"myagent", + validator: Validators.NonEmptyValidator); + } + + public string GetAuth(string defaultValue) + { + return GetArgOrPrompt( + name: Constants.Runner.CommandLine.Args.Auth, + description: "How would you like to authenticate?", + defaultValue: defaultValue, + validator: Validators.AuthSchemeValidator); + } + + public string GetPassword() + { + return GetArgOrPrompt( + name: Constants.Runner.CommandLine.Args.Password, + description: "What is your GitHub password?", + defaultValue: string.Empty, + validator: Validators.NonEmptyValidator); + } + + public string GetPool() + { + return GetArgOrPrompt( + name: Constants.Runner.CommandLine.Args.Pool, + description: "Enter the name of your runner pool:", + defaultValue: "default", + validator: Validators.NonEmptyValidator); + } + + public string GetToken() + { + return GetArgOrPrompt( + name: Constants.Runner.CommandLine.Args.Token, + description: "Enter your personal access token:", + defaultValue: string.Empty, + validator: Validators.NonEmptyValidator); + } + + public string GetRunnerRegisterToken() + { + return GetArgOrPrompt( + name: Constants.Runner.CommandLine.Args.Token, + description: "Enter runner register token:", + defaultValue: string.Empty, + validator: Validators.NonEmptyValidator); + } + + public string GetUrl(bool suppressPromptIfEmpty = false) + { + // Note, GetArg does not consume the arg (like GetArgOrPrompt does). + if (suppressPromptIfEmpty && + string.IsNullOrEmpty(GetArg(Constants.Runner.CommandLine.Args.Url))) + { + return string.Empty; + } + + return GetArgOrPrompt( + name: Constants.Runner.CommandLine.Args.Url, + description: "What is the URL of your repository?", + defaultValue: string.Empty, + validator: Validators.ServerUrlValidator); + } + + public string GetUserName() + { + return GetArgOrPrompt( + name: Constants.Runner.CommandLine.Args.UserName, + description: "What is your GitHub username?", + defaultValue: string.Empty, + validator: Validators.NonEmptyValidator); + } + + public string GetWindowsLogonAccount(string defaultValue, string descriptionMsg) + { + return GetArgOrPrompt( + name: Constants.Runner.CommandLine.Args.WindowsLogonAccount, + description: descriptionMsg, + defaultValue: defaultValue, + validator: Validators.NTAccountValidator); + } + + public string GetWindowsLogonPassword(string accountName) + { + return GetArgOrPrompt( + name: Constants.Runner.CommandLine.Args.WindowsLogonPassword, + description: $"Password for the account {accountName}", + defaultValue: string.Empty, + validator: Validators.NonEmptyValidator); + } + + public string GetWork() + { + return GetArgOrPrompt( + name: Constants.Runner.CommandLine.Args.Work, + description: "Enter name of work folder:", + defaultValue: Constants.Path.WorkDirectory, + validator: Validators.NonEmptyValidator); + } + + public string GetMonitorSocketAddress() + { + return GetArg(Constants.Runner.CommandLine.Args.MonitorSocketAddress); + } + + public string GetNotificationPipeName() + { + return GetArg(Constants.Runner.CommandLine.Args.NotificationPipeName); + } + + public string GetNotificationSocketAddress() + { + return GetArg(Constants.Runner.CommandLine.Args.NotificationSocketAddress); + } + + // This is used to find out the source from where the Runner.Listener.exe was launched at the time of run + public string GetStartupType() + { + return GetArg(Constants.Runner.CommandLine.Args.StartupType); + } + + public string GetProxyUrl() + { + return GetArg(Constants.Runner.CommandLine.Args.ProxyUrl); + } + + public string GetProxyUserName() + { + 
return GetArg(Constants.Runner.CommandLine.Args.ProxyUserName); + } + + public string GetProxyPassword() + { + return GetArg(Constants.Runner.CommandLine.Args.ProxyPassword); + } + + public bool GetSkipCertificateValidation() + { + return TestFlag(Constants.Runner.CommandLine.Flags.SslSkipCertValidation); + } + + public string GetCACertificate() + { + return GetArg(Constants.Runner.CommandLine.Args.SslCACert); + } + + public string GetClientCertificate() + { + return GetArg(Constants.Runner.CommandLine.Args.SslClientCert); + } + + public string GetClientCertificatePrivateKey() + { + return GetArg(Constants.Runner.CommandLine.Args.SslClientCertKey); + } + + public string GetClientCertificateArchrive() + { + return GetArg(Constants.Runner.CommandLine.Args.SslClientCertArchive); + } + + public string GetClientCertificatePassword() + { + return GetArg(Constants.Runner.CommandLine.Args.SslClientCertPassword); + } + + // + // Private helpers. + // + private string GetArg(string name) + { + string result; + if (!_parser.Args.TryGetValue(name, out result)) + { + result = GetEnvArg(name); + } + + return result; + } + + private void RemoveArg(string name) + { + if (_parser.Args.ContainsKey(name)) + { + _parser.Args.Remove(name); + } + + if (_envArgs.ContainsKey(name)) + { + _envArgs.Remove(name); + } + } + + private string GetArgOrPrompt( + string name, + string description, + string defaultValue, + Func validator) + { + // Check for the arg in the command line parser. + ArgUtil.NotNull(validator, nameof(validator)); + string result = GetArg(name); + + // Return the arg if it is not empty and is valid. + _trace.Info($"Arg '{name}': '{result}'"); + if (!string.IsNullOrEmpty(result)) + { + // After read the arg from input commandline args, remove it from Arg dictionary, + // This will help if bad arg value passed through CommandLine arg, when ConfigurationManager ask CommandSetting the second time, + // It will prompt for input instead of continue use the bad input. + _trace.Info($"Remove {name} from Arg dictionary."); + RemoveArg(name); + + if (validator(result)) + { + return result; + } + + _trace.Info("Arg is invalid."); + } + + // Otherwise prompt for the arg. 
+ return _promptManager.ReadValue( + argName: name, + description: description, + secret: Constants.Runner.CommandLine.Args.Secrets.Any(x => string.Equals(x, name, StringComparison.OrdinalIgnoreCase)), + defaultValue: defaultValue, + validator: validator, + unattended: Unattended); + } + + private string GetEnvArg(string name) + { + string val; + if (_envArgs.TryGetValue(name, out val) && !string.IsNullOrEmpty(val)) + { + _trace.Info($"Env arg '{name}': '{val}'"); + return val; + } + + return null; + } + + private bool TestCommand(string name) + { + bool result = _parser.IsCommand(name); + _trace.Info($"Command '{name}': '{result}'"); + return result; + } + + private bool TestFlag(string name) + { + bool result = _parser.Flags.Contains(name); + if (!result) + { + string envStr = GetEnvArg(name); + if (!bool.TryParse(envStr, out result)) + { + result = false; + } + } + + _trace.Info($"Flag '{name}': '{result}'"); + return result; + } + + private bool TestFlagOrPrompt( + string name, + string description, + bool defaultValue) + { + bool result = TestFlag(name); + if (!result) + { + result = _promptManager.ReadBool( + argName: name, + description: description, + defaultValue: defaultValue, + unattended: Unattended); + } + + return result; + } + } +} diff --git a/src/Runner.Listener/Configuration/ConfigurationManager.cs b/src/Runner.Listener/Configuration/ConfigurationManager.cs new file mode 100644 index 00000000000..b544e307ad5 --- /dev/null +++ b/src/Runner.Listener/Configuration/ConfigurationManager.cs @@ -0,0 +1,667 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Capabilities; +using GitHub.Runner.Common.Util; +using GitHub.Services.Common; +using GitHub.Services.OAuth; +using GitHub.Services.WebApi; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Security.Cryptography; +using System.Security.Principal; +using System.Threading; +using System.Threading.Tasks; +using System.Runtime.InteropServices; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; + +namespace GitHub.Runner.Listener.Configuration +{ + [ServiceLocator(Default = typeof(ConfigurationManager))] + public interface IConfigurationManager : IRunnerService + { + bool IsConfigured(); + Task ConfigureAsync(CommandSettings command); + Task UnconfigureAsync(CommandSettings command); + RunnerSettings LoadSettings(); + } + + public sealed class ConfigurationManager : RunnerService, IConfigurationManager + { + private IConfigurationStore _store; + private IRunnerServer _runnerServer; + private ITerminal _term; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + _runnerServer = HostContext.GetService(); + Trace.Verbose("Creating _store"); + _store = hostContext.GetService(); + Trace.Verbose("store created"); + _term = hostContext.GetService(); + } + + public bool IsConfigured() + { + bool result = _store.IsConfigured(); + Trace.Info($"Is configured: {result}"); + return result; + } + + public RunnerSettings LoadSettings() + { + Trace.Info(nameof(LoadSettings)); + if (!IsConfigured()) + { + throw new InvalidOperationException("Not configured"); + } + + RunnerSettings settings = _store.GetSettings(); + Trace.Info("Settings Loaded"); + + return settings; + } + + public async Task ConfigureAsync(CommandSettings command) + { + _term.WriteLine(); + 
_term.WriteLine("--------------------------------------------------------------------------------", ConsoleColor.White); + _term.WriteLine("| ____ _ _ _ _ _ _ _ _ |", ConsoleColor.White); + _term.WriteLine("| / ___(_) |_| | | |_ _| |__ / \\ ___| |_(_) ___ _ __ ___ |", ConsoleColor.White); + _term.WriteLine("| | | _| | __| |_| | | | | '_ \\ / _ \\ / __| __| |/ _ \\| '_ \\/ __| |", ConsoleColor.White); + _term.WriteLine("| | |_| | | |_| _ | |_| | |_) | / ___ \\ (__| |_| | (_) | | | \\__ \\ |", ConsoleColor.White); + _term.WriteLine("| \\____|_|\\__|_| |_|\\__,_|_.__/ /_/ \\_\\___|\\__|_|\\___/|_| |_|___/ |", ConsoleColor.White); + _term.WriteLine("| |", ConsoleColor.White); + _term.Write("| ", ConsoleColor.White); + _term.Write("Self-hosted runner registration", ConsoleColor.Cyan); + _term.WriteLine(" |", ConsoleColor.White); + _term.WriteLine("| |", ConsoleColor.White); + _term.WriteLine("--------------------------------------------------------------------------------", ConsoleColor.White); + + ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); + Trace.Info(nameof(ConfigureAsync)); + if (IsConfigured()) + { + throw new InvalidOperationException("Cannot configure the runner because it is already configured. To reconfigure the runner, run 'config.cmd remove' or './config.sh remove' first."); + } + + // Populate proxy setting from commandline args + var runnerProxy = HostContext.GetService(); + bool saveProxySetting = false; + string proxyUrl = command.GetProxyUrl(); + if (!string.IsNullOrEmpty(proxyUrl)) + { + if (!Uri.IsWellFormedUriString(proxyUrl, UriKind.Absolute)) + { + throw new ArgumentOutOfRangeException(nameof(proxyUrl)); + } + + Trace.Info("Reset proxy base on commandline args."); + string proxyUserName = command.GetProxyUserName(); + string proxyPassword = command.GetProxyPassword(); + (runnerProxy as RunnerWebProxy).SetupProxy(proxyUrl, proxyUserName, proxyPassword); + saveProxySetting = true; + } + + // Populate cert setting from commandline args + var runnerCertManager = HostContext.GetService(); + bool saveCertSetting = false; + bool skipCertValidation = command.GetSkipCertificateValidation(); + string caCert = command.GetCACertificate(); + string clientCert = command.GetClientCertificate(); + string clientCertKey = command.GetClientCertificatePrivateKey(); + string clientCertArchive = command.GetClientCertificateArchrive(); + string clientCertPassword = command.GetClientCertificatePassword(); + + // We require all Certificate files are under agent root. + // So we can set ACL correctly when configure as service + if (!string.IsNullOrEmpty(caCert)) + { + caCert = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), caCert); + ArgUtil.File(caCert, nameof(caCert)); + } + + if (!string.IsNullOrEmpty(clientCert) && + !string.IsNullOrEmpty(clientCertKey) && + !string.IsNullOrEmpty(clientCertArchive)) + { + // Ensure all client cert pieces are there. 
+ clientCert = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), clientCert); + clientCertKey = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), clientCertKey); + clientCertArchive = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), clientCertArchive); + + ArgUtil.File(clientCert, nameof(clientCert)); + ArgUtil.File(clientCertKey, nameof(clientCertKey)); + ArgUtil.File(clientCertArchive, nameof(clientCertArchive)); + } + else if (!string.IsNullOrEmpty(clientCert) || + !string.IsNullOrEmpty(clientCertKey) || + !string.IsNullOrEmpty(clientCertArchive)) + { + // Print out which args are missing. + ArgUtil.NotNullOrEmpty(Constants.Runner.CommandLine.Args.SslClientCert, Constants.Runner.CommandLine.Args.SslClientCert); + ArgUtil.NotNullOrEmpty(Constants.Runner.CommandLine.Args.SslClientCertKey, Constants.Runner.CommandLine.Args.SslClientCertKey); + ArgUtil.NotNullOrEmpty(Constants.Runner.CommandLine.Args.SslClientCertArchive, Constants.Runner.CommandLine.Args.SslClientCertArchive); + } + + if (skipCertValidation || !string.IsNullOrEmpty(caCert) || !string.IsNullOrEmpty(clientCert)) + { + Trace.Info("Reset runner cert setting base on commandline args."); + (runnerCertManager as RunnerCertificateManager).SetupCertificate(skipCertValidation, caCert, clientCert, clientCertKey, clientCertArchive, clientCertPassword); + saveCertSetting = true; + } + + RunnerSettings runnerSettings = new RunnerSettings(); + + bool isHostedServer = false; + // Loop getting url and creds until you can connect + ICredentialProvider credProvider = null; + VssCredentials creds = null; + _term.WriteSection("Authentication"); + while (true) + { + // Get the URL + var inputUrl = command.GetUrl(); + if (!inputUrl.Contains("github.com", StringComparison.OrdinalIgnoreCase)) + { + runnerSettings.ServerUrl = inputUrl; + // Get the credentials + credProvider = GetCredentialProvider(command, runnerSettings.ServerUrl); + creds = credProvider.GetVssCredentials(HostContext); + Trace.Info("legacy vss cred retrieved"); + } + else + { + runnerSettings.GitHubUrl = inputUrl; + var githubToken = command.GetRunnerRegisterToken(); + GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken); + runnerSettings.ServerUrl = authResult.TenantUrl; + creds = authResult.ToVssCredentials(); + Trace.Info("cred retrieved via GitHub auth"); + } + + try + { + // Determine the service deployment type based on connection data. (Hosted/OnPremises) + isHostedServer = await IsHostedServer(runnerSettings.ServerUrl, creds); + + // Validate can connect. + await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), creds); + + _term.WriteLine(); + _term.WriteSuccessMessage("Connected to GitHub"); + + Trace.Info("Test Connection complete."); + break; + } + catch (Exception e) when (!command.Unattended) + { + _term.WriteError(e); + _term.WriteError("Failed to connect. 
Try again or ctrl-c to quit"); + _term.WriteLine(); + } + } + + // We want to use the native CSP of the platform for storage, so we use the RSACSP directly + RSAParameters publicKey; + var keyManager = HostContext.GetService(); + using (var rsa = keyManager.CreateKey()) + { + publicKey = rsa.ExportParameters(false); + } + + _term.WriteSection("Runner Registration"); + + //Get all the agent pools, and select the first private pool + List agentPools = await _runnerServer.GetAgentPoolsAsync(); + TaskAgentPool agentPool = agentPools?.Where(x => x.IsHosted == false).FirstOrDefault(); + + if (agentPool == null) + { + throw new TaskAgentPoolNotFoundException($"Could not find any private pool. Contact support."); + } + else + { + Trace.Info("Found a private pool with id {1} and name {2}", agentPool.Id, agentPool.Name); + runnerSettings.PoolId = agentPool.Id; + runnerSettings.PoolName = agentPool.Name; + } + + TaskAgent agent; + while (true) + { + runnerSettings.AgentName = command.GetAgentName(); + + // Get the system capabilities. + Dictionary systemCapabilities = await HostContext.GetService().GetCapabilitiesAsync(runnerSettings, CancellationToken.None); + + _term.WriteLine(); + + var agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName); + Trace.Verbose("Returns {0} agents", agents.Count); + agent = agents.FirstOrDefault(); + if (agent != null) + { + _term.WriteLine("A runner exists with the same name", ConsoleColor.Yellow); + if (command.GetReplace()) + { + // Update existing agent with new PublicKey, agent version and SystemCapabilities. + agent = UpdateExistingAgent(agent, publicKey, systemCapabilities); + + try + { + agent = await _runnerServer.UpdateAgentAsync(runnerSettings.PoolId, agent); + _term.WriteSuccessMessage("Successfully replaced the runner"); + break; + } + catch (Exception e) when (!command.Unattended) + { + _term.WriteError(e); + _term.WriteError("Failed to replace the runner. Try again or ctrl-c to quit"); + } + } + else if (command.Unattended) + { + // if not replace and it is unattended config. + throw new TaskAgentExistsException($"Pool {runnerSettings.PoolId} already contains a runner with name {runnerSettings.AgentName}."); + } + } + else + { + // Create a new agent. + agent = CreateNewAgent(runnerSettings.AgentName, publicKey, systemCapabilities); + + try + { + agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent); + _term.WriteSuccessMessage("Runner successfully added"); + break; + } + catch (Exception e) when (!command.Unattended) + { + _term.WriteError(e); + _term.WriteError("Failed to add the runner. Try again or ctrl-c to quit"); + } + } + } + // Add Agent Id to settings + runnerSettings.AgentId = agent.Id; + + // respect the serverUrl resolve by server. + // in case of agent configured using collection url instead of account url. + string agentServerUrl; + if (agent.Properties.TryGetValidatedValue("ServerUrl", out agentServerUrl) && + !string.IsNullOrEmpty(agentServerUrl)) + { + Trace.Info($"Agent server url resolve by server: '{agentServerUrl}'."); + + // we need make sure the Schema/Host/Port component of the url remain the same. 
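+ // If they differ, keep the scheme://host:port the user entered and take only the path from the server-returned URL.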
+ UriBuilder inputServerUrl = new UriBuilder(runnerSettings.ServerUrl); + UriBuilder serverReturnedServerUrl = new UriBuilder(agentServerUrl); + if (Uri.Compare(inputServerUrl.Uri, serverReturnedServerUrl.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0) + { + inputServerUrl.Path = serverReturnedServerUrl.Path; + Trace.Info($"Replace server returned url's scheme://host:port component with user input server url's scheme://host:port: '{inputServerUrl.Uri.AbsoluteUri}'."); + runnerSettings.ServerUrl = inputServerUrl.Uri.AbsoluteUri; + } + else + { + runnerSettings.ServerUrl = agentServerUrl; + } + } + + // See if the server supports our OAuth key exchange for credentials + if (agent.Authorization != null && + agent.Authorization.ClientId != Guid.Empty && + agent.Authorization.AuthorizationUrl != null) + { + UriBuilder configServerUrl = new UriBuilder(runnerSettings.ServerUrl); + UriBuilder oauthEndpointUrlBuilder = new UriBuilder(agent.Authorization.AuthorizationUrl); + if (!isHostedServer && Uri.Compare(configServerUrl.Uri, oauthEndpointUrlBuilder.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0) + { + oauthEndpointUrlBuilder.Scheme = configServerUrl.Scheme; + oauthEndpointUrlBuilder.Host = configServerUrl.Host; + oauthEndpointUrlBuilder.Port = configServerUrl.Port; + Trace.Info($"Set oauth endpoint url's scheme://host:port component to match runner configure url's scheme://host:port: '{oauthEndpointUrlBuilder.Uri.AbsoluteUri}'."); + } + + var credentialData = new CredentialData + { + Scheme = Constants.Configuration.OAuth, + Data = + { + { "clientId", agent.Authorization.ClientId.ToString("D") }, + { "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri }, + { "oauthEndpointUrl", oauthEndpointUrlBuilder.Uri.AbsoluteUri }, + }, + }; + + // Save the negotiated OAuth credential data + _store.SaveCredential(credentialData); + } + else + { + + throw new NotSupportedException("Message queue listen OAuth token."); + } + + // Testing agent connection, detect any protential connection issue, like local clock skew that cause OAuth token expired. + var credMgr = HostContext.GetService(); + VssCredentials credential = credMgr.LoadCredentials(); + try + { + await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential); + _term.WriteSuccessMessage("Runner connection is good"); + } + catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is")) + { + // there are two exception messages server send that indicate clock skew. + // 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}. + // 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}. + Trace.Error("Catch exception during test agent connection."); + Trace.Error(ex); + throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again."); + } + + _term.WriteSection("Runner settings"); + + // We will Combine() what's stored with root. Defaults to string a relative path + runnerSettings.WorkFolder = command.GetWork(); + + // notificationPipeName for Hosted agent provisioner. 
+ runnerSettings.NotificationPipeName = command.GetNotificationPipeName(); + + runnerSettings.MonitorSocketAddress = command.GetMonitorSocketAddress(); + + runnerSettings.NotificationSocketAddress = command.GetNotificationSocketAddress(); + + _store.SaveSettings(runnerSettings); + + if (saveProxySetting) + { + Trace.Info("Save proxy setting to disk."); + (runnerProxy as RunnerWebProxy).SaveProxySetting(); + } + + if (saveCertSetting) + { + Trace.Info("Save agent cert setting to disk."); + (runnerCertManager as RunnerCertificateManager).SaveCertificateSetting(); + } + + _term.WriteLine(); + _term.WriteSuccessMessage("Settings Saved."); + _term.WriteLine(); + + bool saveRuntimeOptions = false; + var runtimeOptions = new RunnerRuntimeOptions(); +#if OS_WINDOWS + if (command.GitUseSChannel) + { + saveRuntimeOptions = true; + runtimeOptions.GitUseSecureChannel = true; + } +#endif + if (saveRuntimeOptions) + { + Trace.Info("Save agent runtime options to disk."); + _store.SaveRunnerRuntimeOptions(runtimeOptions); + } + +#if OS_WINDOWS + // config windows service + bool runAsService = command.GetRunAsService(); + if (runAsService) + { + Trace.Info("Configuring to run the agent as service"); + var serviceControlManager = HostContext.GetService(); + serviceControlManager.ConfigureService(runnerSettings, command); + } + +#elif OS_LINUX || OS_OSX + // generate service config script for OSX and Linux, GenerateScripts() will no-opt on windows. + var serviceControlManager = HostContext.GetService(); + serviceControlManager.GenerateScripts(runnerSettings); +#endif + } + + public async Task UnconfigureAsync(CommandSettings command) + { + ArgUtil.Equal(RunMode.Normal, HostContext.RunMode, nameof(HostContext.RunMode)); + string currentAction = string.Empty; + + _term.WriteSection("Runner removal"); + + try + { + //stop, uninstall service and remove service config file + if (_store.IsServiceConfigured()) + { + currentAction = "Removing service"; + _term.WriteLine(currentAction); +#if OS_WINDOWS + var serviceControlManager = HostContext.GetService(); + serviceControlManager.UnconfigureService(); + + _term.WriteLine(); + _term.WriteSuccessMessage("Runner service removed"); +#elif OS_LINUX + // unconfig system D service first + throw new Exception("Unconfigure service first"); +#elif OS_OSX + // unconfig osx service first + throw new Exception("Unconfigure service first"); +#endif + } + + //delete agent from the server + currentAction = "Removing runner from the server"; + bool isConfigured = _store.IsConfigured(); + bool hasCredentials = _store.HasCredentials(); + if (isConfigured && hasCredentials) + { + RunnerSettings settings = _store.GetSettings(); + var credentialManager = HostContext.GetService(); + + // Get the credentials + VssCredentials creds = null; + if (string.IsNullOrEmpty(settings.GitHubUrl)) + { + var credProvider = GetCredentialProvider(command, settings.ServerUrl); + creds = credProvider.GetVssCredentials(HostContext); + Trace.Info("legacy vss cred retrieved"); + } + else + { + var githubToken = command.GetToken(); + GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken); + creds = authResult.ToVssCredentials(); + Trace.Info("cred retrieved via GitHub auth"); + } + + // Determine the service deployment type based on connection data. 
(Hosted/OnPremises) + bool isHostedServer = await IsHostedServer(settings.ServerUrl, creds); + await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds); + + var agents = await _runnerServer.GetAgentsAsync(settings.PoolId, settings.AgentName); + Trace.Verbose("Returns {0} agents", agents.Count); + TaskAgent agent = agents.FirstOrDefault(); + if (agent == null) + { + _term.WriteLine("Does not exist. Skipping " + currentAction); + } + else + { + await _runnerServer.DeleteAgentAsync(settings.PoolId, settings.AgentId); + + _term.WriteLine(); + _term.WriteSuccessMessage("Runner removed successfully"); + } + } + else + { + _term.WriteLine("Cannot connect to server, because config files are missing. Skipping removing runner from the server."); + } + + //delete credential config files + currentAction = "Removing .credentials"; + if (hasCredentials) + { + _store.DeleteCredential(); + var keyManager = HostContext.GetService(); + keyManager.DeleteKey(); + _term.WriteSuccessMessage("Removed .credentials"); + } + else + { + _term.WriteLine("Does not exist. Skipping " + currentAction); + } + + //delete settings config file + currentAction = "Removing .runner"; + if (isConfigured) + { + // delete proxy setting + (HostContext.GetService() as RunnerWebProxy).DeleteProxySetting(); + + // delete agent cert setting + (HostContext.GetService() as RunnerCertificateManager).DeleteCertificateSetting(); + + // delete agent runtime option + _store.DeleteRunnerRuntimeOptions(); + + _store.DeleteSettings(); + _term.WriteSuccessMessage("Removed .runner"); + } + else + { + _term.WriteLine("Does not exist. Skipping " + currentAction); + } + } + catch (Exception) + { + _term.WriteError("Failed: " + currentAction); + throw; + } + + _term.WriteLine(); + } + + private ICredentialProvider GetCredentialProvider(CommandSettings command, string serverUrl) + { + Trace.Info(nameof(GetCredentialProvider)); + + var credentialManager = HostContext.GetService(); + string authType = command.GetAuth(defaultValue: Constants.Configuration.AAD); + + // Create the credential. + Trace.Info("Creating credential for auth: {0}", authType); + var provider = credentialManager.GetCredentialProvider(authType); + if (provider.RequireInteractive && command.Unattended) + { + throw new NotSupportedException($"Authentication type '{authType}' is not supported for unattended configuration."); + } + + provider.EnsureCredential(HostContext, command, serverUrl); + return provider; + } + + + private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, Dictionary systemCapabilities) + { + ArgUtil.NotNull(agent, nameof(agent)); + agent.Authorization = new TaskAgentAuthorization + { + PublicKey = new TaskAgentPublicKey(publicKey.Exponent, publicKey.Modulus), + }; + + // update - update instead of delete so we don't lose user capabilities etc... + agent.Version = BuildConstants.RunnerPackage.Version; + agent.OSDescription = RuntimeInformation.OSDescription; + + foreach (KeyValuePair capability in systemCapabilities) + { + agent.SystemCapabilities[capability.Key] = capability.Value ?? 
string.Empty; + } + + return agent; + } + + private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, Dictionary systemCapabilities) + { + TaskAgent agent = new TaskAgent(agentName) + { + Authorization = new TaskAgentAuthorization + { + PublicKey = new TaskAgentPublicKey(publicKey.Exponent, publicKey.Modulus), + }, + MaxParallelism = 1, + Version = BuildConstants.RunnerPackage.Version, + OSDescription = RuntimeInformation.OSDescription, + }; + + foreach (KeyValuePair capability in systemCapabilities) + { + agent.SystemCapabilities[capability.Key] = capability.Value ?? string.Empty; + } + + return agent; + } + + private async Task IsHostedServer(string serverUrl, VssCredentials credentials) + { + // Determine the service deployment type based on connection data. (Hosted/OnPremises) + var locationServer = HostContext.GetService(); + VssConnection connection = VssUtil.CreateConnection(new Uri(serverUrl), credentials); + await locationServer.ConnectAsync(connection); + try + { + var connectionData = await locationServer.GetConnectionDataAsync(); + Trace.Info($"Server deployment type: {connectionData.DeploymentType}"); + return connectionData.DeploymentType.HasFlag(DeploymentFlags.Hosted); + } + catch (Exception ex) + { + // Since the DeploymentType is Enum, deserialization exception means there is a new Enum member been added. + // It's more likely to be Hosted since OnPremises is always behind and customer can update their agent if are on-prem + Trace.Error(ex); + return true; + } + } + + private async Task GetTenantCredential(string githubUrl, string githubToken) + { + var gitHubUrl = new UriBuilder(githubUrl); + var githubApiUrl = $"https://api.github.com/repos/{gitHubUrl.Path.Trim('/')}/actions-runners/registration"; + using (var httpClientHandler = HostContext.CreateHttpClientHandler()) + using (var httpClient = new HttpClient(httpClientHandler)) + { + httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken); + httpClient.DefaultRequestHeaders.UserAgent.Add(HostContext.UserAgent); + httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.github.shuri-preview+json")); + var response = await httpClient.PostAsync(githubApiUrl, new StringContent("", null, "application/json")); + + if (response.IsSuccessStatusCode) + { + Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'"); + var jsonResponse = await response.Content.ReadAsStringAsync(); + return StringUtil.ConvertFromJson(jsonResponse); + } + else + { + _term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'"); + var errorResponse = await response.Content.ReadAsStringAsync(); + _term.WriteError(errorResponse); + response.EnsureSuccessStatusCode(); + return null; + } + } + } + } +} diff --git a/src/Runner.Listener/Configuration/CredentialManager.cs b/src/Runner.Listener/Configuration/CredentialManager.cs new file mode 100644 index 00000000000..34e540fe620 --- /dev/null +++ b/src/Runner.Listener/Configuration/CredentialManager.cs @@ -0,0 +1,91 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using GitHub.Services.Common; +using GitHub.Services.OAuth; + +namespace GitHub.Runner.Listener.Configuration +{ + // TODO: Refactor extension manager to enable using it from the agent process. 
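+ // Maps credential scheme names to their ICredentialProvider implementations and loads the stored credential for a configured runner.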
+ [ServiceLocator(Default = typeof(CredentialManager))] + public interface ICredentialManager : IRunnerService + { + ICredentialProvider GetCredentialProvider(string credType); + VssCredentials LoadCredentials(); + } + + public class CredentialManager : RunnerService, ICredentialManager + { + public static readonly Dictionary CredentialTypes = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + { Constants.Configuration.AAD, typeof(AadDeviceCodeAccessToken)}, + { Constants.Configuration.PAT, typeof(PersonalAccessToken)}, + { Constants.Configuration.OAuth, typeof(OAuthCredential)}, + { Constants.Configuration.OAuthAccessToken, typeof(OAuthAccessTokenCredential)}, + }; + + public ICredentialProvider GetCredentialProvider(string credType) + { + Trace.Info(nameof(GetCredentialProvider)); + Trace.Info("Creating type {0}", credType); + + if (!CredentialTypes.ContainsKey(credType)) + { + throw new ArgumentException("Invalid Credential Type"); + } + + Trace.Info("Creating credential type: {0}", credType); + var creds = Activator.CreateInstance(CredentialTypes[credType]) as ICredentialProvider; + Trace.Verbose("Created credential type"); + return creds; + } + + public VssCredentials LoadCredentials() + { + IConfigurationStore store = HostContext.GetService(); + + if (!store.HasCredentials()) + { + throw new InvalidOperationException("Credentials not stored. Must reconfigure."); + } + + CredentialData credData = store.GetCredentials(); + ICredentialProvider credProv = GetCredentialProvider(credData.Scheme); + credProv.CredentialData = credData; + + VssCredentials creds = credProv.GetVssCredentials(HostContext); + + return creds; + } + } + + [DataContract] + public sealed class GitHubAuthResult + { + [DataMember(Name = "url")] + public string TenantUrl { get; set; } + + [DataMember(Name = "token_schema")] + public string TokenSchema { get; set; } + + [DataMember(Name = "token")] + public string Token { get; set; } + + public VssCredentials ToVssCredentials() + { + ArgUtil.NotNullOrEmpty(TokenSchema, nameof(TokenSchema)); + ArgUtil.NotNullOrEmpty(Token, nameof(Token)); + + if (string.Equals(TokenSchema, "OAuthAccessToken", StringComparison.OrdinalIgnoreCase)) + { + return new VssCredentials(null, new VssOAuthAccessTokenCredential(Token), CredentialPromptType.DoNotPrompt); + } + else + { + throw new NotSupportedException($"Not supported token schema: {TokenSchema}"); + } + } + } +} diff --git a/src/Runner.Listener/Configuration/CredentialProvider.cs b/src/Runner.Listener/Configuration/CredentialProvider.cs new file mode 100644 index 00000000000..5223c08033d --- /dev/null +++ b/src/Runner.Listener/Configuration/CredentialProvider.cs @@ -0,0 +1,231 @@ +using System; +using System.Diagnostics; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Headers; +using Microsoft.IdentityModel.Clients.ActiveDirectory; +using GitHub.Runner.Common.Util; +using GitHub.Services.Client; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using GitHub.Services.OAuth; + +namespace GitHub.Runner.Listener.Configuration +{ + public interface ICredentialProvider + { + Boolean RequireInteractive { get; } + CredentialData CredentialData { get; set; } + VssCredentials GetVssCredentials(IHostContext context); + void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl); + } + + public abstract class CredentialProvider : ICredentialProvider + { + public CredentialProvider(string scheme) + { + CredentialData = new 
CredentialData(); + CredentialData.Scheme = scheme; + } + + public virtual Boolean RequireInteractive => false; + public CredentialData CredentialData { get; set; } + + public abstract VssCredentials GetVssCredentials(IHostContext context); + public abstract void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl); + } + + public sealed class AadDeviceCodeAccessToken : CredentialProvider + { + private string _azureDevOpsClientId = "97877f11-0fc6-4aee-b1ff-febb0519dd00"; + + public override Boolean RequireInteractive => true; + + public AadDeviceCodeAccessToken() : base(Constants.Configuration.AAD) { } + + public override VssCredentials GetVssCredentials(IHostContext context) + { + ArgUtil.NotNull(context, nameof(context)); + Tracing trace = context.GetTrace(nameof(AadDeviceCodeAccessToken)); + trace.Info(nameof(GetVssCredentials)); + ArgUtil.NotNull(CredentialData, nameof(CredentialData)); + + CredentialData.Data.TryGetValue(Constants.Runner.CommandLine.Args.Url, out string serverUrl); + ArgUtil.NotNullOrEmpty(serverUrl, nameof(serverUrl)); + + var tenantAuthorityUrl = GetTenantAuthorityUrl(context, serverUrl); + if (tenantAuthorityUrl == null) + { + throw new NotSupportedException($"'{serverUrl}' is not backed by Azure Active Directory."); + } + + LoggerCallbackHandler.LogCallback = ((LogLevel level, string message, bool containsPii) => + { + switch (level) + { + case LogLevel.Information: + trace.Info(message); + break; + case LogLevel.Error: + trace.Error(message); + break; + case LogLevel.Warning: + trace.Warning(message); + break; + default: + trace.Verbose(message); + break; + } + }); + + LoggerCallbackHandler.UseDefaultLogging = false; + AuthenticationContext ctx = new AuthenticationContext(tenantAuthorityUrl.AbsoluteUri); + var queryParameters = $"redirect_uri={Uri.EscapeDataString(new Uri(serverUrl).GetLeftPart(UriPartial.Authority))}"; + DeviceCodeResult codeResult = ctx.AcquireDeviceCodeAsync("https://management.core.windows.net/", _azureDevOpsClientId, queryParameters).GetAwaiter().GetResult(); + + var term = context.GetService(); + term.WriteLine($"Please finish AAD device code flow in browser ({codeResult.VerificationUrl}), user code: {codeResult.UserCode}"); + if (string.Equals(CredentialData.Data[Constants.Runner.CommandLine.Flags.LaunchBrowser], bool.TrueString, StringComparison.OrdinalIgnoreCase)) + { + try + { +#if OS_WINDOWS + Process.Start(new ProcessStartInfo() { FileName = codeResult.VerificationUrl, UseShellExecute = true }); +#elif OS_LINUX + Process.Start(new ProcessStartInfo() { FileName = "xdg-open", Arguments = codeResult.VerificationUrl }); +#else + Process.Start(new ProcessStartInfo() { FileName = "open", Arguments = codeResult.VerificationUrl }); +#endif + } + catch (Exception ex) + { + // not able to open browser, ex: xdg-open/open is not installed. + trace.Error(ex); + term.WriteLine($"Fail to open browser. 
{codeResult.Message}"); + } + } + + AuthenticationResult authResult = ctx.AcquireTokenByDeviceCodeAsync(codeResult).GetAwaiter().GetResult(); + ArgUtil.NotNull(authResult, nameof(authResult)); + trace.Info($"receive AAD auth result with {authResult.AccessTokenType} token"); + + var aadCred = new VssAadCredential(new VssAadToken(authResult)); + VssCredentials creds = new VssCredentials(null, aadCred, CredentialPromptType.DoNotPrompt); + trace.Info("cred created"); + + return creds; + } + + public override void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl) + { + ArgUtil.NotNull(context, nameof(context)); + Tracing trace = context.GetTrace(nameof(AadDeviceCodeAccessToken)); + trace.Info(nameof(EnsureCredential)); + ArgUtil.NotNull(command, nameof(command)); + CredentialData.Data[Constants.Runner.CommandLine.Args.Url] = serverUrl; + CredentialData.Data[Constants.Runner.CommandLine.Flags.LaunchBrowser] = command.GetAutoLaunchBrowser().ToString(); + } + + private Uri GetTenantAuthorityUrl(IHostContext context, string serverUrl) + { + using (var client = new HttpClient(context.CreateHttpClientHandler())) + { + client.DefaultRequestHeaders.Accept.Clear(); + client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); + client.DefaultRequestHeaders.Add("X-TFS-FedAuthRedirect", "Suppress"); + client.DefaultRequestHeaders.UserAgent.Clear(); + client.DefaultRequestHeaders.UserAgent.AddRange(VssClientHttpRequestSettings.Default.UserAgent); + var requestMessage = new HttpRequestMessage(HttpMethod.Head, $"{serverUrl.Trim('/')}/_apis/connectiondata"); + var response = client.SendAsync(requestMessage).GetAwaiter().GetResult(); + + // Get the tenant from the Login URL, MSA backed accounts will not return `Bearer` www-authenticate header. 
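+ // When present, the AAD tenant authority is carried in the "authorization_uri=" parameter of the Bearer challenge.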
+ var bearerResult = response.Headers.WwwAuthenticate.Where(p => p.Scheme.Equals("Bearer", StringComparison.OrdinalIgnoreCase)).FirstOrDefault(); + if (bearerResult != null && bearerResult.Parameter.StartsWith("authorization_uri=", StringComparison.OrdinalIgnoreCase)) + { + var authorizationUri = bearerResult.Parameter.Substring("authorization_uri=".Length); + if (Uri.TryCreate(authorizationUri, UriKind.Absolute, out Uri aadTenantUrl)) + { + return aadTenantUrl; + } + } + + return null; + } + } + } + + public sealed class OAuthAccessTokenCredential : CredentialProvider + { + public OAuthAccessTokenCredential() : base(Constants.Configuration.OAuthAccessToken) { } + + public override VssCredentials GetVssCredentials(IHostContext context) + { + ArgUtil.NotNull(context, nameof(context)); + Tracing trace = context.GetTrace(nameof(OAuthAccessTokenCredential)); + trace.Info(nameof(GetVssCredentials)); + ArgUtil.NotNull(CredentialData, nameof(CredentialData)); + string token; + if (!CredentialData.Data.TryGetValue(Constants.Runner.CommandLine.Args.Token, out token)) + { + token = null; + } + + ArgUtil.NotNullOrEmpty(token, nameof(token)); + + trace.Info("token retrieved: {0} chars", token.Length); + VssCredentials creds = new VssCredentials(null, new VssOAuthAccessTokenCredential(token), CredentialPromptType.DoNotPrompt); + trace.Info("cred created"); + + return creds; + } + + public override void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl) + { + ArgUtil.NotNull(context, nameof(context)); + Tracing trace = context.GetTrace(nameof(OAuthAccessTokenCredential)); + trace.Info(nameof(EnsureCredential)); + ArgUtil.NotNull(command, nameof(command)); + CredentialData.Data[Constants.Runner.CommandLine.Args.Token] = command.GetToken(); + } + } + + public sealed class PersonalAccessToken : CredentialProvider + { + public PersonalAccessToken() : base(Constants.Configuration.PAT) { } + + public override VssCredentials GetVssCredentials(IHostContext context) + { + ArgUtil.NotNull(context, nameof(context)); + Tracing trace = context.GetTrace(nameof(PersonalAccessToken)); + trace.Info(nameof(GetVssCredentials)); + ArgUtil.NotNull(CredentialData, nameof(CredentialData)); + string token; + if (!CredentialData.Data.TryGetValue(Constants.Runner.CommandLine.Args.Token, out token)) + { + token = null; + } + + ArgUtil.NotNullOrEmpty(token, nameof(token)); + + trace.Info("token retrieved: {0} chars", token.Length); + + // PAT uses a basic credential + VssBasicCredential basicCred = new VssBasicCredential("ActionsRunner", token); + VssCredentials creds = new VssCredentials(null, basicCred, CredentialPromptType.DoNotPrompt); + trace.Info("cred created"); + + return creds; + } + + public override void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl) + { + ArgUtil.NotNull(context, nameof(context)); + Tracing trace = context.GetTrace(nameof(PersonalAccessToken)); + trace.Info(nameof(EnsureCredential)); + ArgUtil.NotNull(command, nameof(command)); + CredentialData.Data[Constants.Runner.CommandLine.Args.Token] = command.GetToken(); + } + } +} diff --git a/src/Runner.Listener/Configuration/IRSAKeyManager.cs b/src/Runner.Listener/Configuration/IRSAKeyManager.cs new file mode 100644 index 00000000000..11778e1265a --- /dev/null +++ b/src/Runner.Listener/Configuration/IRSAKeyManager.cs @@ -0,0 +1,108 @@ +using System; +using System.Runtime.Serialization; +using System.Security.Cryptography; +using GitHub.Runner.Common; + +namespace 
GitHub.Runner.Listener.Configuration +{ + /// + /// Manages an RSA key for the agent using the most appropriate store for the target platform. + /// +#if OS_WINDOWS + [ServiceLocator(Default = typeof(RSAEncryptedFileKeyManager))] +#else + [ServiceLocator(Default = typeof(RSAFileKeyManager))] +#endif + public interface IRSAKeyManager : IRunnerService + { + /// + /// Creates a new RSACryptoServiceProvider instance for the current agent. If a key file is found then the current + /// key is returned to the caller. + /// + /// An RSACryptoServiceProvider instance representing the key for the agent + RSACryptoServiceProvider CreateKey(); + + /// + /// Deletes the RSA key managed by the key manager. + /// + void DeleteKey(); + + /// + /// Gets the RSACryptoServiceProvider instance currently stored by the key manager. + /// + /// An RSACryptoServiceProvider instance representing the key for the agent + /// No key exists in the store + RSACryptoServiceProvider GetKey(); + } + + // Newtonsoft 10 is not working properly with dotnet RSAParameters class + // RSAParameters has fields marked as [NonSerialized] which cause we loss those fields after serialize to JSON + // https://github.com/JamesNK/Newtonsoft.Json/issues/1517 + // https://github.com/dotnet/corefx/issues/23847 + // As workaround, we create our own RSAParameters class without any [NonSerialized] attributes. + [Serializable] + internal class RSAParametersSerializable : ISerializable + { + private RSAParameters _rsaParameters; + + public RSAParameters RSAParameters + { + get + { + return _rsaParameters; + } + } + + public RSAParametersSerializable(RSAParameters rsaParameters) + { + _rsaParameters = rsaParameters; + } + + private RSAParametersSerializable() + { + } + + public byte[] D { get { return _rsaParameters.D; } set { _rsaParameters.D = value; } } + + public byte[] DP { get { return _rsaParameters.DP; } set { _rsaParameters.DP = value; } } + + public byte[] DQ { get { return _rsaParameters.DQ; } set { _rsaParameters.DQ = value; } } + + public byte[] Exponent { get { return _rsaParameters.Exponent; } set { _rsaParameters.Exponent = value; } } + + public byte[] InverseQ { get { return _rsaParameters.InverseQ; } set { _rsaParameters.InverseQ = value; } } + + public byte[] Modulus { get { return _rsaParameters.Modulus; } set { _rsaParameters.Modulus = value; } } + + public byte[] P { get { return _rsaParameters.P; } set { _rsaParameters.P = value; } } + + public byte[] Q { get { return _rsaParameters.Q; } set { _rsaParameters.Q = value; } } + + public RSAParametersSerializable(SerializationInfo information, StreamingContext context) + { + _rsaParameters = new RSAParameters() + { + D = (byte[])information.GetValue("d", typeof(byte[])), + DP = (byte[])information.GetValue("dp", typeof(byte[])), + DQ = (byte[])information.GetValue("dq", typeof(byte[])), + Exponent = (byte[])information.GetValue("exponent", typeof(byte[])), + InverseQ = (byte[])information.GetValue("inverseQ", typeof(byte[])), + Modulus = (byte[])information.GetValue("modulus", typeof(byte[])), + P = (byte[])information.GetValue("p", typeof(byte[])), + Q = (byte[])information.GetValue("q", typeof(byte[])) + }; + } + + public void GetObjectData(SerializationInfo info, StreamingContext context) + { + info.AddValue("d", _rsaParameters.D); + info.AddValue("dp", _rsaParameters.DP); + info.AddValue("dq", _rsaParameters.DQ); + info.AddValue("exponent", _rsaParameters.Exponent); + info.AddValue("inverseQ", _rsaParameters.InverseQ); + info.AddValue("modulus", _rsaParameters.Modulus); + 
info.AddValue("p", _rsaParameters.P); + info.AddValue("q", _rsaParameters.Q); + } + } +} diff --git a/src/Runner.Listener/Configuration/NativeWindowsServiceHelper.cs b/src/Runner.Listener/Configuration/NativeWindowsServiceHelper.cs new file mode 100644 index 00000000000..21824f99d5a --- /dev/null +++ b/src/Runner.Listener/Configuration/NativeWindowsServiceHelper.cs @@ -0,0 +1,1319 @@ +#if OS_WINDOWS +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; +using System.Security; +using System.Security.AccessControl; +using System.Security.Principal; +using System.ServiceProcess; +using System.Threading; +using GitHub.Runner.Common.Util; +using Microsoft.Win32; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Listener.Configuration +{ + [ServiceLocator(Default = typeof(NativeWindowsServiceHelper))] + public interface INativeWindowsServiceHelper : IRunnerService + { + string GetUniqueRunnerGroupName(); + + bool LocalGroupExists(string groupName); + + void CreateLocalGroup(string groupName); + + void DeleteLocalGroup(string groupName); + + void AddMemberToLocalGroup(string accountName, string groupName); + + void GrantFullControlToGroup(string path, string groupName); + + void RemoveGroupFromFolderSecuritySetting(string folderPath, string groupName); + + bool IsUserHasLogonAsServicePrivilege(string domain, string userName); + + bool GrantUserLogonAsServicePrivilege(string domain, string userName); + + bool IsValidCredential(string domain, string userName, string logonPassword); + + NTAccount GetDefaultServiceAccount(); + + NTAccount GetDefaultAdminServiceAccount(); + + bool IsServiceExists(string serviceName); + + void InstallService(string serviceName, string serviceDisplayName, string logonAccount, string logonPassword); + + void UninstallService(string serviceName); + + void StartService(string serviceName); + + void StopService(string serviceName); + + string GetSecurityId(string domainName, string userName); + + void SetAutoLogonPassword(string password); + + void ResetAutoLogonPassword(); + + bool IsRunningInElevatedMode(); + + void LoadUserProfile(string domain, string userName, string logonPassword, out IntPtr tokenHandle, out PROFILEINFO userProfile); + + void UnloadUserProfile(IntPtr tokenHandle, PROFILEINFO userProfile); + + bool IsValidAutoLogonCredential(string domain, string userName, string logonPassword); + + void GrantDirectoryPermissionForAccount(string accountName, IList folders); + + void RevokeDirectoryPermissionForAccount(IList folders); + } + + public class NativeWindowsServiceHelper : RunnerService, INativeWindowsServiceHelper + { + private const string RunnerServiceLocalGroupPrefix = "GITHUB_ActionsRunner_G"; + private ITerminal _term; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + _term = hostContext.GetService(); + } + + public string GetUniqueRunnerGroupName() + { + return RunnerServiceLocalGroupPrefix + IOUtil.GetPathHash(HostContext.GetDirectory(WellKnownDirectory.Bin)).Substring(0, 5); + } + + public bool LocalGroupExists(string groupName) + { + Trace.Entering(); + bool exists = false; + + IntPtr bufptr; + int returnCode = NetLocalGroupGetInfo(null, // computer name + groupName, + 1, // group info with comment + out bufptr); // Win32GroupAPI.LocalGroupInfo + + try + { + switch (returnCode) + { + case ReturnCode.S_OK: + Trace.Info($"Local group 
'{groupName}' exist."); + exists = true; + break; + + case ReturnCode.NERR_GroupNotFound: + case ReturnCode.ERROR_NO_SUCH_ALIAS: + exists = false; + break; + + case ReturnCode.ERROR_ACCESS_DENIED: + // NOTE: None of the exception thrown here are userName facing. The caller logs this exception and prints a more understandable error + throw new UnauthorizedAccessException("Access Denied"); + + default: + throw new Exception($"Error: Operation {nameof(NetLocalGroupGetInfo)} failed with return code {returnCode}"); + } + } + finally + { + // we don't need to actually read the info to determine whether it exists + int bufferFreeError = NetApiBufferFree(bufptr); + if (bufferFreeError != 0) + { + Trace.Error(StringUtil.Format("Buffer free error, could not free buffer allocated, error code: {0}", bufferFreeError)); + } + } + + return exists; + } + + public void CreateLocalGroup(string groupName) + { + Trace.Entering(); + LocalGroupInfo groupInfo = new LocalGroupInfo(); + groupInfo.Name = groupName; + groupInfo.Comment = StringUtil.Format("Built-in group used by Team Foundation Server."); + + int returnCode = NetLocalGroupAdd(null, // computer name + 1, // 1 means include comment + ref groupInfo, + 0); // param error number + + // return on success + if (returnCode == ReturnCode.S_OK) + { + Trace.Info($"Local Group '{groupName}' created"); + return; + } + + // Error Cases + switch (returnCode) + { + case ReturnCode.NERR_GroupExists: + case ReturnCode.ERROR_ALIAS_EXISTS: + Trace.Info(StringUtil.Format("Group {0} already exists", groupName)); + break; + case ReturnCode.ERROR_ACCESS_DENIED: + throw new UnauthorizedAccessException("Access Denied"); + + case ReturnCode.ERROR_INVALID_PARAMETER: + throw new ArgumentException($"Invalid Group Name - {groupName}"); + + default: + throw new Exception($"Error: Operation {nameof(NetLocalGroupAdd)} failed with return code {returnCode}"); + } + } + + public void DeleteLocalGroup(string groupName) + { + Trace.Entering(); + int returnCode = NetLocalGroupDel(null, // computer name + groupName); + + // return on success + if (returnCode == ReturnCode.S_OK) + { + Trace.Info($"Local Group '{groupName}' deleted"); + return; + } + + // Error Cases + switch (returnCode) + { + case ReturnCode.NERR_GroupNotFound: + case ReturnCode.ERROR_NO_SUCH_ALIAS: + Trace.Info(StringUtil.Format("Group {0} not exists.", groupName)); + break; + + case ReturnCode.ERROR_ACCESS_DENIED: + throw new UnauthorizedAccessException("Access Denied"); + + default: + throw new Exception($"Error: Operation {nameof(NetLocalGroupDel)} failed with return code {returnCode}"); + } + } + + public void AddMemberToLocalGroup(string accountName, string groupName) + { + Trace.Entering(); + LocalGroupMemberInfo memberInfo = new LocalGroupMemberInfo(); + memberInfo.FullName = accountName; + + int returnCode = NetLocalGroupAddMembers(null, // computer name + groupName, + 3, // group info with fullname (vs sid) + ref memberInfo, + 1); //total entries + + // return on success + if (returnCode == ReturnCode.S_OK) + { + Trace.Info($"Account '{accountName}' is added to local group '{groupName}'."); + return; + } + + // Error Cases + switch (returnCode) + { + case ReturnCode.ERROR_MEMBER_IN_ALIAS: + Trace.Info(StringUtil.Format("Account {0} is already member of group {1}", accountName, groupName)); + break; + case ReturnCode.NERR_GroupNotFound: + case ReturnCode.ERROR_NO_SUCH_ALIAS: + throw new ArgumentException($"Group: {groupName} does not Exist"); + + case ReturnCode.ERROR_NO_SUCH_MEMBER: + throw new 
ArgumentException($"Member: {accountName} does not Exist"); + + case ReturnCode.ERROR_INVALID_MEMBER: + throw new ArgumentException("A new member could not be added to a local group because the member has the wrong account type. If you are configuring on a domain controller, built-in machine accounts cannot be added to local groups. You must use a domain user account instead"); + + case ReturnCode.ERROR_ACCESS_DENIED: + throw new UnauthorizedAccessException("Access Denied"); + + default: + throw new Exception($"Error: Operation {nameof(NetLocalGroupAddMembers)} failed with return code {returnCode}"); + } + } + + public void GrantFullControlToGroup(string path, string groupName) + { + Trace.Entering(); + if (IsGroupHasFullControl(path, groupName)) + { + Trace.Info($"Local group '{groupName}' already has full control to path '{path}'."); + return; + } + + DirectoryInfo dInfo = new DirectoryInfo(path); + DirectorySecurity dSecurity = dInfo.GetAccessControl(); + + if (!dSecurity.AreAccessRulesCanonical) + { + Trace.Warning("Acls are not canonical, this may cause failure"); + } + + dSecurity.AddAccessRule( + new FileSystemAccessRule( + groupName, + FileSystemRights.FullControl, + InheritanceFlags.ContainerInherit | InheritanceFlags.ObjectInherit, + PropagationFlags.None, + AccessControlType.Allow)); + dInfo.SetAccessControl(dSecurity); + } + + private bool IsGroupHasFullControl(string path, string groupName) + { + DirectoryInfo dInfo = new DirectoryInfo(path); + DirectorySecurity dSecurity = dInfo.GetAccessControl(); + + var allAccessRuls = dSecurity.GetAccessRules(true, true, typeof(SecurityIdentifier)).Cast(); + + SecurityIdentifier sid = (SecurityIdentifier)new NTAccount(groupName).Translate(typeof(SecurityIdentifier)); + + if (allAccessRuls.Any(x => x.IdentityReference.Value == sid.ToString() && + x.AccessControlType == AccessControlType.Allow && + x.FileSystemRights.HasFlag(FileSystemRights.FullControl) && + x.InheritanceFlags == (InheritanceFlags.ContainerInherit | InheritanceFlags.ObjectInherit) && + x.PropagationFlags == PropagationFlags.None)) + { + return true; + } + else + { + return false; + } + } + + public bool IsUserHasLogonAsServicePrivilege(string domain, string userName) + { + Trace.Entering(); + + ArgUtil.NotNullOrEmpty(userName, nameof(userName)); + bool userHasPermission = false; + + using (LsaPolicy lsaPolicy = new LsaPolicy()) + { + IntPtr rightsPtr; + uint count; + uint result = LsaEnumerateAccountRights(lsaPolicy.Handle, GetSidBinaryFromWindows(domain, userName), out rightsPtr, out count); + try + { + if (result == 0) + { + IntPtr incrementPtr = rightsPtr; + for (int i = 0; i < count; i++) + { + LSA_UNICODE_STRING nativeRightString = Marshal.PtrToStructure(incrementPtr); + string rightString = Marshal.PtrToStringUni(nativeRightString.Buffer); + Trace.Verbose($"Account {userName} has '{rightString}' right."); + if (string.Equals(rightString, s_logonAsServiceName, StringComparison.OrdinalIgnoreCase)) + { + userHasPermission = true; + } + + incrementPtr += Marshal.SizeOf(nativeRightString); + } + } + else + { + Trace.Error($"Can't enumerate account rights, return code {result}."); + } + } + finally + { + result = LsaFreeMemory(rightsPtr); + if (result != 0) + { + Trace.Error(StringUtil.Format("Failed to free memory from LsaEnumerateAccountRights. 
Return code : {0} ", result)); + } + } + } + + return userHasPermission; + } + + public bool GrantUserLogonAsServicePrivilege(string domain, string userName) + { + Trace.Entering(); + ArgUtil.NotNullOrEmpty(userName, nameof(userName)); + using (LsaPolicy lsaPolicy = new LsaPolicy()) + { + // STATUS_SUCCESS == 0 + uint result = LsaAddAccountRights(lsaPolicy.Handle, GetSidBinaryFromWindows(domain, userName), LogonAsServiceRights, 1); + if (result == 0) + { + Trace.Info($"Successfully grant logon as service privilege to account '{userName}'"); + return true; + } + else + { + Trace.Info($"Fail to grant logon as service privilege to account '{userName}', error code {result}."); + return false; + } + } + } + + public static bool IsWellKnownIdentity(String accountName) + { + NTAccount ntaccount = new NTAccount(accountName); + SecurityIdentifier sid = (SecurityIdentifier)ntaccount.Translate(typeof(SecurityIdentifier)); + + SecurityIdentifier networkServiceSid = new SecurityIdentifier(WellKnownSidType.NetworkServiceSid, null); + SecurityIdentifier localServiceSid = new SecurityIdentifier(WellKnownSidType.LocalServiceSid, null); + SecurityIdentifier localSystemSid = new SecurityIdentifier(WellKnownSidType.LocalSystemSid, null); + + return sid.Equals(networkServiceSid) || + sid.Equals(localServiceSid) || + sid.Equals(localSystemSid); + } + + public bool IsValidCredential(string domain, string userName, string logonPassword) + { + return IsValidCredentialInternal(domain, userName, logonPassword, LOGON32_LOGON_NETWORK); + } + + public bool IsValidAutoLogonCredential(string domain, string userName, string logonPassword) + { + return IsValidCredentialInternal(domain, userName, logonPassword, LOGON32_LOGON_INTERACTIVE); + } + + public NTAccount GetDefaultServiceAccount() + { + SecurityIdentifier sid = new SecurityIdentifier(WellKnownSidType.NetworkServiceSid, domainSid: null); + NTAccount account = sid.Translate(typeof(NTAccount)) as NTAccount; + + if (account == null) + { + throw new InvalidOperationException("Cannot find network service account"); + } + + return account; + } + + public NTAccount GetDefaultAdminServiceAccount() + { + SecurityIdentifier sid = new SecurityIdentifier(WellKnownSidType.LocalSystemSid, domainSid: null); + NTAccount account = sid.Translate(typeof(NTAccount)) as NTAccount; + + if (account == null) + { + throw new InvalidOperationException("Cannot find local system account"); + } + + return account; + } + + public void RemoveGroupFromFolderSecuritySetting(string folderPath, string groupName) + { + DirectoryInfo dInfo = new DirectoryInfo(folderPath); + if (dInfo.Exists) + { + DirectorySecurity dSecurity = dInfo.GetAccessControl(); + + var allAccessRuls = dSecurity.GetAccessRules(true, true, typeof(SecurityIdentifier)).Cast(); + + SecurityIdentifier sid = (SecurityIdentifier)new NTAccount(groupName).Translate(typeof(SecurityIdentifier)); + + foreach (FileSystemAccessRule ace in allAccessRuls) + { + if (String.Equals(sid.ToString(), ace.IdentityReference.Value, StringComparison.OrdinalIgnoreCase)) + { + dSecurity.RemoveAccessRuleSpecific(ace); + } + } + dInfo.SetAccessControl(dSecurity); + } + } + + public bool IsServiceExists(string serviceName) + { + Trace.Entering(); + ServiceController service = ServiceController.GetServices().FirstOrDefault(x => x.ServiceName.Equals(serviceName, StringComparison.OrdinalIgnoreCase)); + return service != null; + } + + public void InstallService(string serviceName, string serviceDisplayName, string logonAccount, string logonPassword) + { + 
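// Registers the runner as a Windows service via the Service Control Manager, then configures restart-on-failure recovery and delayed auto-start. +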
Trace.Entering(); + + string agentServiceExecutable = "\"" + Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), WindowsServiceControlManager.WindowsServiceControllerName) + "\""; + IntPtr scmHndl = IntPtr.Zero; + IntPtr svcHndl = IntPtr.Zero; + IntPtr tmpBuf = IntPtr.Zero; + IntPtr svcLock = IntPtr.Zero; + + try + { + //invoke the service with special argument, that tells it to register an event log trace source (need to run as an admin) + using (var processInvoker = HostContext.CreateService()) + { + processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + _term.WriteLine(message.Data); + }; + processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + _term.WriteLine(message.Data); + }; + + processInvoker.ExecuteAsync(workingDirectory: string.Empty, + fileName: agentServiceExecutable, + arguments: "init", + environment: null, + requireExitCodeZero: true, + cancellationToken: CancellationToken.None).GetAwaiter().GetResult(); + } + + Trace.Verbose(StringUtil.Format("Trying to open SCManager.")); + scmHndl = OpenSCManager(null, null, ServiceManagerRights.AllAccess); + if (scmHndl.ToInt64() <= 0) + { + throw new Exception("Failed to Open Service Control Manager"); + } + + Trace.Verbose(StringUtil.Format("Opened SCManager. Trying to create service {0}", serviceName)); + svcHndl = CreateService(scmHndl, + serviceName, + serviceDisplayName, + ServiceRights.AllAccess, + SERVICE_WIN32_OWN_PROCESS, + ServiceBootFlag.AutoStart, + ServiceError.Normal, + agentServiceExecutable, + null, + IntPtr.Zero, + null, + logonAccount, + logonPassword); + if (svcHndl.ToInt64() <= 0) + { + throw new InvalidOperationException($"Error: Operation {nameof(CreateService)} failed with return code {GetLastError()}"); + } + + _term.WriteLine($"Service {serviceName} successfully installed"); + + //set recovery option to restart on failure. + ArrayList failureActions = new ArrayList(); + //first failure, we will restart the service right away. + failureActions.Add(new FailureAction(RecoverAction.Restart, 0)); + //second failure, we will restart the service after 1 min. 
+ failureActions.Add(new FailureAction(RecoverAction.Restart, 60000)); + //subsequent failures, we will restart the service after 1 min + failureActions.Add(new FailureAction(RecoverAction.Restart, 60000)); + + // Lock the Service Database + svcLock = LockServiceDatabase(scmHndl); + if (svcLock.ToInt64() <= 0) + { + throw new Exception("Failed to Lock Service Database for Write"); + } + + int[] actions = new int[failureActions.Count * 2]; + int currInd = 0; + foreach (FailureAction fa in failureActions) + { + actions[currInd] = (int)fa.Type; + actions[++currInd] = fa.Delay; + currInd++; + } + + // Need to pack 8 bytes per struct + tmpBuf = Marshal.AllocHGlobal(failureActions.Count * 8); + // Move array into marshallable pointer + Marshal.Copy(actions, 0, tmpBuf, failureActions.Count * 2); + + // Change service error actions + // Set the SERVICE_FAILURE_ACTIONS struct + SERVICE_FAILURE_ACTIONS sfa = new SERVICE_FAILURE_ACTIONS(); + sfa.cActions = failureActions.Count; + sfa.dwResetPeriod = SERVICE_NO_CHANGE; + sfa.lpCommand = String.Empty; + sfa.lpRebootMsg = String.Empty; + sfa.lpsaActions = tmpBuf.ToInt64(); + + // Call the ChangeServiceFailureActions() abstraction of ChangeServiceConfig2() + bool falureActionsResult = ChangeServiceFailureActions(svcHndl, SERVICE_CONFIG_FAILURE_ACTIONS, ref sfa); + //Check the return + if (!falureActionsResult) + { + int lastErrorCode = (int)GetLastError(); + Exception win32exception = new Win32Exception(lastErrorCode); + if (lastErrorCode == ReturnCode.ERROR_ACCESS_DENIED) + { + throw new SecurityException("Access Denied while setting service recovery options.", win32exception); + } + else + { + throw win32exception; + } + } + else + { + _term.WriteLine($"Service {serviceName} successfully set recovery option"); + } + + // Change service to delayed auto start + SERVICE_DELAYED_AUTO_START_INFO sdasi = new SERVICE_DELAYED_AUTO_START_INFO(); + sdasi.fDelayedAutostart = true; + + // Call the ChangeServiceDelayedAutoStart() abstraction of ChangeServiceConfig2() + bool delayedStartResult = ChangeServiceDelayedAutoStart(svcHndl, SERVICE_CONFIG_DELAYED_AUTO_START_INFO, ref sdasi); + //Check the return + if (!delayedStartResult) + { + int lastErrorCode = (int)GetLastError(); + Exception win32exception = new Win32Exception(lastErrorCode); + if (lastErrorCode == ReturnCode.ERROR_ACCESS_DENIED) + { + throw new SecurityException("Access Denied while setting service delayed auto start options.", win32exception); + } + else + { + throw win32exception; + } + } + else + { + _term.WriteLine($"Service {serviceName} successfully set to delayed auto start"); + } + + _term.WriteLine($"Service {serviceName} successfully configured"); + } + finally + { + if (scmHndl != IntPtr.Zero) + { + // Unlock the service database + if (svcLock != IntPtr.Zero) + { + UnlockServiceDatabase(svcLock); + svcLock = IntPtr.Zero; + } + + // Close the service control manager handle + CloseServiceHandle(scmHndl); + scmHndl = IntPtr.Zero; + } + + // Close the service handle + if (svcHndl != IntPtr.Zero) + { + CloseServiceHandle(svcHndl); + svcHndl = IntPtr.Zero; + } + + // Free the memory + if (tmpBuf != IntPtr.Zero) + { + Marshal.FreeHGlobal(tmpBuf); + tmpBuf = IntPtr.Zero; + } + } + } + + public void UninstallService(string serviceName) + { + Trace.Entering(); + Trace.Verbose(StringUtil.Format("Trying to open SCManager.")); + IntPtr scmHndl = OpenSCManager(null, null, ServiceManagerRights.Connect); + + if (scmHndl.ToInt64() <= 0) + { + throw new Exception("Failed to Open Service Control Manager"); + 
} + + try + { + Trace.Verbose(StringUtil.Format("Opened SCManager. query installed service {0}", serviceName)); + IntPtr serviceHndl = OpenService(scmHndl, + serviceName, + ServiceRights.StandardRightsRequired | ServiceRights.Stop | ServiceRights.QueryStatus); + + if (serviceHndl == IntPtr.Zero) + { + int lastError = Marshal.GetLastWin32Error(); + throw new Win32Exception(lastError); + } + + try + { + Trace.Info(StringUtil.Format("Trying to delete service {0}", serviceName)); + int result = DeleteService(serviceHndl); + if (result == 0) + { + result = Marshal.GetLastWin32Error(); + throw new Win32Exception(result, $"Could not delete service '{serviceName}'"); + } + + Trace.Info("successfully removed the service"); + } + finally + { + CloseServiceHandle(serviceHndl); + } + } + finally + { + CloseServiceHandle(scmHndl); + } + } + + public void StartService(string serviceName) + { + Trace.Entering(); + try + { + ServiceController service = ServiceController.GetServices().FirstOrDefault(x => x.ServiceName.Equals(serviceName, StringComparison.OrdinalIgnoreCase)); + if (service != null) + { + service.Start(); + _term.WriteLine($"Service {serviceName} started successfully"); + } + else + { + throw new InvalidOperationException($"Cannot find service {serviceName}"); + } + } + catch (Exception exception) + { + Trace.Error(exception); + _term.WriteError("Cannot start the service. Check the logs for more details."); + + // This is the last step in the configuration. Even if the start failed the status of the configuration should be error + // If its configured through scripts its mandatory we indicate the failure where configuration failed to start the service + throw; + } + } + + public void StopService(string serviceName) + { + Trace.Entering(); + try + { + ServiceController service = ServiceController.GetServices().FirstOrDefault(x => x.ServiceName.Equals(serviceName, StringComparison.OrdinalIgnoreCase)); + if (service != null) + { + if (service.Status == ServiceControllerStatus.Running) + { + Trace.Info("Trying to stop the service"); + service.Stop(); + + try + { + _term.WriteLine("Waiting for service to stop..."); + service.WaitForStatus(ServiceControllerStatus.Stopped, TimeSpan.FromSeconds(35)); + } + catch (System.ServiceProcess.TimeoutException) + { + throw new InvalidOperationException($"Cannot stop the service {serviceName} in a timely fashion."); + } + } + + Trace.Info("Successfully stopped the service"); + } + else + { + Trace.Info($"Cannot find service {serviceName}"); + } + } + catch (Exception exception) + { + Trace.Error(exception); + _term.WriteError($"Cannot stop the service {serviceName} in a timely fashion."); + + // Log the exception but do not report it as error. We can try uninstalling the service and then report it as error if something goes wrong. + } + } + + public string GetSecurityId(string domainName, string userName) + { + var account = new NTAccount(domainName, userName); + var sid = account.Translate(typeof(SecurityIdentifier)); + return sid != null ? 
sid.ToString() : null; + } + + public void SetAutoLogonPassword(string password) + { + using (LsaPolicy lsaPolicy = new LsaPolicy(LSA_AccessPolicy.POLICY_CREATE_SECRET)) + { + lsaPolicy.SetSecretData(LsaPolicy.DefaultPassword, password); + } + } + + public void ResetAutoLogonPassword() + { + using (LsaPolicy lsaPolicy = new LsaPolicy(LSA_AccessPolicy.POLICY_CREATE_SECRET)) + { + lsaPolicy.SetSecretData(LsaPolicy.DefaultPassword, null); + } + } + + public bool IsRunningInElevatedMode() + { + return new WindowsPrincipal(WindowsIdentity.GetCurrent()).IsInRole(WindowsBuiltInRole.Administrator); + } + + public void LoadUserProfile(string domain, string userName, string logonPassword, out IntPtr tokenHandle, out PROFILEINFO userProfile) + { + Trace.Entering(); + tokenHandle = IntPtr.Zero; + + ArgUtil.NotNullOrEmpty(userName, nameof(userName)); + if (LogonUser(userName, domain, logonPassword, LOGON32_LOGON_INTERACTIVE, LOGON32_PROVIDER_DEFAULT, out tokenHandle) == 0) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + userProfile = new PROFILEINFO(); + userProfile.dwSize = Marshal.SizeOf(typeof(PROFILEINFO)); + userProfile.lpUserName = userName; + if (!LoadUserProfile(tokenHandle, ref userProfile)) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + Trace.Info($"Successfully loaded the profile for {domain}\\{userName}."); + } + + public void UnloadUserProfile(IntPtr tokenHandle, PROFILEINFO userProfile) + { + Trace.Entering(); + + if (tokenHandle == IntPtr.Zero) + { + Trace.Verbose("The handle to unload user profile is not set. Returning."); + } + + if (!UnloadUserProfile(tokenHandle, userProfile.hProfile)) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + Trace.Info($"Successfully unloaded the profile for {userProfile.lpUserName}."); + } + + public void GrantDirectoryPermissionForAccount(string accountName, IList folders) + { + Trace.Entering(); + string groupName = GetUniqueRunnerGroupName(); + Trace.Info(StringUtil.Format("Calculated unique group name {0}", groupName)); + + if (!LocalGroupExists(groupName)) + { + Trace.Info(StringUtil.Format("Trying to create group {0}", groupName)); + CreateLocalGroup(groupName); + } + + Trace.Info(StringUtil.Format("Trying to add userName {0} to the group {1}", accountName, groupName)); + AddMemberToLocalGroup(accountName, groupName); + + // grant permssion for folders + foreach(var folder in folders) + { + if (Directory.Exists(folder)) + { + Trace.Info(StringUtil.Format("Set full access control to group for the folder {0}", folder)); + GrantFullControlToGroup(folder, groupName); + } + } + } + + public void RevokeDirectoryPermissionForAccount(IList folders) + { + Trace.Entering(); + string groupName = GetUniqueRunnerGroupName(); + Trace.Info(StringUtil.Format("Calculated unique group name {0}", groupName)); + + // remove the group from folders + foreach(var folder in folders) + { + if (Directory.Exists(folder)) + { + Trace.Info(StringUtil.Format($"Remove the group {groupName} for the folder {folder}.")); + try + { + RemoveGroupFromFolderSecuritySetting(folder, groupName); + } + catch(Exception ex) + { + Trace.Error(ex); + } + } + } + + //delete group + Trace.Info(StringUtil.Format($"Delete the group {groupName}.")); + DeleteLocalGroup(groupName); + } + + private bool IsValidCredentialInternal(string domain, string userName, string logonPassword, UInt32 logonType) + { + Trace.Entering(); + IntPtr tokenHandle = IntPtr.Zero; + + ArgUtil.NotNullOrEmpty(userName, nameof(userName)); + + Trace.Info($"Verify 
credential for account {userName}."); + int result = LogonUser(userName, domain, logonPassword, logonType, LOGON32_PROVIDER_DEFAULT, out tokenHandle); + + if (tokenHandle.ToInt32() != 0) + { + if (!CloseHandle(tokenHandle)) + { + Trace.Error("Failed during CloseHandle on token from LogonUser"); + } + } + + if (result != 0) + { + Trace.Info($"Credential for account '{userName}' is valid."); + return true; + } + else + { + Trace.Info($"Credential for account '{userName}' is invalid."); + return false; + } + } + + private byte[] GetSidBinaryFromWindows(string domain, string user) + { + try + { + SecurityIdentifier sid = (SecurityIdentifier)new NTAccount(StringUtil.Format("{0}\\{1}", domain, user).TrimStart('\\')).Translate(typeof(SecurityIdentifier)); + byte[] binaryForm = new byte[sid.BinaryLength]; + sid.GetBinaryForm(binaryForm, 0); + return binaryForm; + } + catch (Exception exception) + { + Trace.Error(exception); + return null; + } + } + + // Helper class not to repeat whenever we deal with LSA* api + internal class LsaPolicy : IDisposable + { + public IntPtr Handle { get; set; } + + public LsaPolicy() + : this(LSA_AccessPolicy.POLICY_ALL_ACCESS) + { + } + + public LsaPolicy(LSA_AccessPolicy access) + { + LSA_UNICODE_STRING system = new LSA_UNICODE_STRING(); + LSA_OBJECT_ATTRIBUTES attrib = new LSA_OBJECT_ATTRIBUTES() + { + Length = 0, + RootDirectory = IntPtr.Zero, + Attributes = 0, + SecurityDescriptor = IntPtr.Zero, + SecurityQualityOfService = IntPtr.Zero, + }; + + IntPtr handle = IntPtr.Zero; + uint hr = LsaOpenPolicy(ref system, ref attrib, (uint)access, out handle); + if (hr != 0 || handle == IntPtr.Zero) + { + throw new Exception($"Error: Operation {nameof(LsaOpenPolicy)} failed with return code {hr}"); + } + + Handle = handle; + } + + public void SetSecretData(string key, string value) + { + LSA_UNICODE_STRING secretData = new LSA_UNICODE_STRING(); + LSA_UNICODE_STRING secretName = new LSA_UNICODE_STRING(); + + secretName.Buffer = Marshal.StringToHGlobalUni(key); + + var charSize = sizeof(char); + + secretName.Length = (UInt16)(key.Length * charSize); + secretName.MaximumLength = (UInt16)((key.Length + 1) * charSize); + + if (value != null && value.Length > 0) + { + // Create data and key + secretData.Buffer = Marshal.StringToHGlobalUni(value); + secretData.Length = (UInt16)(value.Length * charSize); + secretData.MaximumLength = (UInt16)((value.Length + 1) * charSize); + } + else + { + // Delete data and key + secretData.Buffer = IntPtr.Zero; + secretData.Length = 0; + secretData.MaximumLength = 0; + } + + uint result = LsaStorePrivateData(Handle, ref secretName, ref secretData); + uint winErrorCode = LsaNtStatusToWinError(result); + if (winErrorCode != 0) + { + throw new Exception($"Error: Operation {nameof(LsaNtStatusToWinError)} failed with return code {winErrorCode}"); + } + } + + void IDisposable.Dispose() + { + // We will ignore LsaClose error + LsaClose(Handle); + GC.SuppressFinalize(this); + } + + internal static string DefaultPassword = "DefaultPassword"; + } + + internal enum LSA_AccessPolicy : long + { + POLICY_VIEW_LOCAL_INFORMATION = 0x00000001L, + POLICY_VIEW_AUDIT_INFORMATION = 0x00000002L, + POLICY_GET_PRIVATE_INFORMATION = 0x00000004L, + POLICY_TRUST_ADMIN = 0x00000008L, + POLICY_CREATE_ACCOUNT = 0x00000010L, + POLICY_CREATE_SECRET = 0x00000020L, + POLICY_CREATE_PRIVILEGE = 0x00000040L, + POLICY_SET_DEFAULT_QUOTA_LIMITS = 0x00000080L, + POLICY_SET_AUDIT_REQUIREMENTS = 0x00000100L, + POLICY_AUDIT_LOG_ADMIN = 0x00000200L, + POLICY_SERVER_ADMIN = 0x00000400L, + 
POLICY_LOOKUP_NAMES = 0x00000800L, + POLICY_NOTIFICATION = 0x00001000L, + POLICY_ALL_ACCESS = 0x00001FFFL + } + + [DllImport("advapi32.dll", SetLastError = true, PreserveSig = true)] + public static extern uint LsaStorePrivateData( + IntPtr policyHandle, + ref LSA_UNICODE_STRING KeyName, + ref LSA_UNICODE_STRING PrivateData + ); + + [DllImport("advapi32.dll", SetLastError = true, PreserveSig = true)] + public static extern uint LsaNtStatusToWinError( + uint status + ); + + private static UInt32 LOGON32_LOGON_INTERACTIVE = 2; + private const UInt32 LOGON32_LOGON_NETWORK = 3; + + // Declaration of external pinvoke functions + private static readonly string s_logonAsServiceName = "SeServiceLogonRight"; + + private const UInt32 LOGON32_PROVIDER_DEFAULT = 0; + + private const int SERVICE_WIN32_OWN_PROCESS = 0x00000010; + private const int SERVICE_NO_CHANGE = -1; + private const int SERVICE_CONFIG_FAILURE_ACTIONS = 0x2; + private const int SERVICE_CONFIG_DELAYED_AUTO_START_INFO = 0x3; + + // TODO Fix this. This is not yet available in coreclr (newer version?) + private const int UnicodeCharSize = 2; + + private static LSA_UNICODE_STRING[] LogonAsServiceRights + { + get + { + return new[] + { + new LSA_UNICODE_STRING() + { + Buffer = Marshal.StringToHGlobalUni(s_logonAsServiceName), + Length = (UInt16)(s_logonAsServiceName.Length * UnicodeCharSize), + MaximumLength = (UInt16) ((s_logonAsServiceName.Length + 1) * UnicodeCharSize) + } + }; + } + } + + public struct ReturnCode + { + public const int S_OK = 0; + public const int ERROR_ACCESS_DENIED = 5; + public const int ERROR_INVALID_PARAMETER = 87; + public const int ERROR_MEMBER_NOT_IN_ALIAS = 1377; // member not in a group + public const int ERROR_MEMBER_IN_ALIAS = 1378; // member already exists + public const int ERROR_ALIAS_EXISTS = 1379; // group already exists + public const int ERROR_NO_SUCH_ALIAS = 1376; + public const int ERROR_NO_SUCH_MEMBER = 1387; + public const int ERROR_INVALID_MEMBER = 1388; + public const int NERR_GroupNotFound = 2220; + public const int NERR_GroupExists = 2223; + public const int NERR_UserInGroup = 2236; + public const uint STATUS_ACCESS_DENIED = 0XC0000022; //NTSTATUS error code: Access Denied + } + + [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)] + public struct LocalGroupInfo + { + [MarshalAs(UnmanagedType.LPWStr)] + public string Name; + [MarshalAs(UnmanagedType.LPWStr)] + public string Comment; + } + + [StructLayout(LayoutKind.Sequential)] + public struct LSA_UNICODE_STRING + { + public UInt16 Length; + public UInt16 MaximumLength; + + // We need to use an IntPtr because if we wrap the Buffer with a SafeHandle-derived class, we get a failure during LsaAddAccountRights + public IntPtr Buffer; + } + + [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)] + public struct LocalGroupMemberInfo + { + [MarshalAs(UnmanagedType.LPWStr)] + public string FullName; + } + + [StructLayout(LayoutKind.Sequential)] + public struct LSA_OBJECT_ATTRIBUTES + { + public UInt32 Length; + public IntPtr RootDirectory; + public LSA_UNICODE_STRING ObjectName; + public UInt32 Attributes; + public IntPtr SecurityDescriptor; + public IntPtr SecurityQualityOfService; + } + + [StructLayout(LayoutKind.Sequential)] + public struct SERVICE_FAILURE_ACTIONS + { + public int dwResetPeriod; + public string lpRebootMsg; + public string lpCommand; + public int cActions; + public long lpsaActions; + } + + [StructLayout(LayoutKind.Sequential)] + public struct SERVICE_DELAYED_AUTO_START_INFO + { + public bool 
fDelayedAutostart; + } + + // Class to represent a failure action which consists of a recovery + // action type and an action delay + private class FailureAction + { + // Property to set recover action type + public RecoverAction Type { get; set; } + // Property to set recover action delay + public int Delay { get; set; } + + // Constructor + public FailureAction(RecoverAction actionType, int actionDelay) + { + Type = actionType; + Delay = actionDelay; + } + } + + [Flags] + public enum ServiceManagerRights + { + Connect = 0x0001, + CreateService = 0x0002, + EnumerateService = 0x0004, + Lock = 0x0008, + QueryLockStatus = 0x0010, + ModifyBootConfig = 0x0020, + StandardRightsRequired = 0xF0000, + AllAccess = + (StandardRightsRequired | Connect | CreateService | EnumerateService | Lock | QueryLockStatus + | ModifyBootConfig) + } + + [Flags] + public enum ServiceRights + { + QueryConfig = 0x1, + ChangeConfig = 0x2, + QueryStatus = 0x4, + EnumerateDependants = 0x8, + Start = 0x10, + Stop = 0x20, + PauseContinue = 0x40, + Interrogate = 0x80, + UserDefinedControl = 0x100, + Delete = 0x00010000, + StandardRightsRequired = 0xF0000, + AllAccess = + (StandardRightsRequired | QueryConfig | ChangeConfig | QueryStatus | EnumerateDependants | Start | Stop + | PauseContinue | Interrogate | UserDefinedControl) + } + + public enum ServiceError + { + Ignore = 0x00000000, + Normal = 0x00000001, + Severe = 0x00000002, + Critical = 0x00000003 + } + + public enum ServiceBootFlag + { + Start = 0x00000000, + SystemStart = 0x00000001, + AutoStart = 0x00000002, + DemandStart = 0x00000003, + Disabled = 0x00000004 + } + + // Enum for recovery actions (correspond to the Win32 equivalents ) + private enum RecoverAction + { + None = 0, + Restart = 1, + Reboot = 2, + RunCommand = 3 + } + + [DllImport("Netapi32.dll")] + private extern static int NetLocalGroupGetInfo(string servername, + string groupname, + int level, + out IntPtr bufptr); + + [DllImport("Netapi32.dll")] + private extern static int NetApiBufferFree(IntPtr Buffer); + + + [DllImport("Netapi32.dll")] + private extern static int NetLocalGroupAdd([MarshalAs(UnmanagedType.LPWStr)] string servername, + int level, + ref LocalGroupInfo buf, + int parm_err); + + [DllImport("Netapi32.dll")] + private extern static int NetLocalGroupAddMembers([MarshalAs(UnmanagedType.LPWStr)] string serverName, + [MarshalAs(UnmanagedType.LPWStr)] string groupName, + int level, + ref LocalGroupMemberInfo buf, + int totalEntries); + + [DllImport("Netapi32.dll")] + public extern static int NetLocalGroupDel([MarshalAs(UnmanagedType.LPWStr)] string servername, [MarshalAs(UnmanagedType.LPWStr)] string groupname); + + [DllImport("advapi32.dll")] + private static extern Int32 LsaClose(IntPtr ObjectHandle); + + [DllImport("advapi32.dll", SetLastError = true, PreserveSig = true)] + private static extern uint LsaOpenPolicy( + ref LSA_UNICODE_STRING SystemName, + ref LSA_OBJECT_ATTRIBUTES ObjectAttributes, + uint DesiredAccess, + out IntPtr PolicyHandle); + + [DllImport("advapi32.dll", SetLastError = true, PreserveSig = true)] + private static extern uint LsaAddAccountRights( + IntPtr PolicyHandle, + byte[] AccountSid, + LSA_UNICODE_STRING[] UserRights, + uint CountOfRights); + + [DllImport("advapi32.dll", SetLastError = true, PreserveSig = true)] + public static extern uint LsaEnumerateAccountRights( + IntPtr PolicyHandle, + byte[] AccountSid, + out IntPtr UserRights, + out uint CountOfRights); + + [DllImport("advapi32.dll", SetLastError = true, PreserveSig = true)] + public static extern uint 
LsaFreeMemory(IntPtr pBuffer); + + [DllImport("advapi32.dll", CharSet = CharSet.Unicode, SetLastError = true)] + public static extern int LogonUser(string userName, string domain, string password, uint logonType, uint logonProvider, out IntPtr tokenHandle); + + [DllImport("userenv.dll", SetLastError = true, CharSet = CharSet.Unicode)] + public static extern Boolean LoadUserProfile(IntPtr hToken, ref PROFILEINFO lpProfileInfo); + + [DllImport("userenv.dll", SetLastError = true, CharSet = CharSet.Unicode)] + public static extern Boolean UnloadUserProfile(IntPtr hToken, IntPtr hProfile); + + [DllImport("kernel32", SetLastError = true)] + public static extern bool CloseHandle(IntPtr handle); + + [DllImport("advapi32.dll", EntryPoint = "CreateServiceA")] + private static extern IntPtr CreateService( + IntPtr hSCManager, + string lpServiceName, + string lpDisplayName, + ServiceRights dwDesiredAccess, + int dwServiceType, + ServiceBootFlag dwStartType, + ServiceError dwErrorControl, + string lpBinaryPathName, + string lpLoadOrderGroup, + IntPtr lpdwTagId, + string lpDependencies, + string lp, + string lpPassword); + + [DllImport("advapi32.dll")] + public static extern IntPtr OpenSCManager(string lpMachineName, string lpDatabaseName, ServiceManagerRights dwDesiredAccess); + + [DllImport("advapi32.dll", SetLastError = true)] + public static extern IntPtr OpenService(IntPtr hSCManager, string lpServiceName, ServiceRights dwDesiredAccess); + + [DllImport("advapi32.dll", SetLastError = true)] + public static extern int DeleteService(IntPtr hService); + + [DllImport("advapi32.dll")] + public static extern int CloseServiceHandle(IntPtr hSCObject); + + [DllImport("advapi32.dll")] + public static extern IntPtr LockServiceDatabase(IntPtr hSCManager); + + [DllImport("advapi32.dll")] + public static extern bool UnlockServiceDatabase(IntPtr hSCManager); + + [DllImport("advapi32.dll", EntryPoint = "ChangeServiceConfig2")] + public static extern bool ChangeServiceFailureActions(IntPtr hService, int dwInfoLevel, ref SERVICE_FAILURE_ACTIONS lpInfo); + + [DllImport("advapi32.dll", EntryPoint = "ChangeServiceConfig2")] + public static extern bool ChangeServiceDelayedAutoStart(IntPtr hService, int dwInfoLevel, ref SERVICE_DELAYED_AUTO_START_INFO lpInfo); + + [DllImport("kernel32.dll")] + static extern uint GetLastError(); + } + + [StructLayout(LayoutKind.Sequential)] + public struct PROFILEINFO + { + public int dwSize; + public int dwFlags; + [MarshalAs(UnmanagedType.LPTStr)] + public String lpUserName; + [MarshalAs(UnmanagedType.LPTStr)] + public String lpProfilePath; + [MarshalAs(UnmanagedType.LPTStr)] + public String lpDefaultPath; + [MarshalAs(UnmanagedType.LPTStr)] + public String lpServerName; + [MarshalAs(UnmanagedType.LPTStr)] + public String lpPolicyPath; + public IntPtr hProfile; + } +} +#endif diff --git a/src/Runner.Listener/Configuration/OAuthCredential.cs b/src/Runner.Listener/Configuration/OAuthCredential.cs new file mode 100644 index 00000000000..fb303836cf9 --- /dev/null +++ b/src/Runner.Listener/Configuration/OAuthCredential.cs @@ -0,0 +1,49 @@ +using System; +using GitHub.Runner.Common; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using GitHub.Services.Common; +using GitHub.Services.OAuth; +using GitHub.Services.WebApi; + +namespace GitHub.Runner.Listener.Configuration +{ + public class OAuthCredential : CredentialProvider + { + public OAuthCredential() + : base(Constants.Configuration.OAuth) + { + } + + public override void EnsureCredential( + IHostContext context, + 
CommandSettings command, + String serverUrl) + { + // Nothing to verify here + } + + public override VssCredentials GetVssCredentials(IHostContext context) + { + var clientId = this.CredentialData.Data.GetValueOrDefault("clientId", null); + var authorizationUrl = this.CredentialData.Data.GetValueOrDefault("authorizationUrl", null); + + // For back compat with .credential file that doesn't has 'oauthEndpointUrl' section + var oathEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl); + + ArgUtil.NotNullOrEmpty(clientId, nameof(clientId)); + ArgUtil.NotNullOrEmpty(authorizationUrl, nameof(authorizationUrl)); + + // We expect the key to be in the machine store at this point. Configuration should have set all of + // this up correctly so we can use the key to generate access tokens. + var keyManager = context.GetService(); + var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey()); + var clientCredential = new VssOAuthJwtBearerClientCredential(clientId, authorizationUrl, signingCredentials); + var agentCredential = new VssOAuthCredential(new Uri(oathEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential); + + // Construct a credentials cache with a single OAuth credential for communication. The windows credential + // is explicitly set to null to ensure we never do that negotiation. + return new VssCredentials(null, agentCredential, CredentialPromptType.DoNotPrompt); + } + } +} diff --git a/src/Runner.Listener/Configuration/OsxServiceControlManager.cs b/src/Runner.Listener/Configuration/OsxServiceControlManager.cs new file mode 100644 index 00000000000..24cff481df0 --- /dev/null +++ b/src/Runner.Listener/Configuration/OsxServiceControlManager.cs @@ -0,0 +1,59 @@ +#if OS_OSX +using System; +using System.IO; +using System.Collections.Generic; +using System.Linq; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Listener.Configuration +{ + public class OsxServiceControlManager : ServiceControlManager, ILinuxServiceControlManager + { + // This is the name you would see when you do `systemctl list-units | grep runner` + private const string _svcNamePattern = "actions.runner.{0}.{1}.{2}"; + private const string _svcDisplayPattern = "GitHub Actions Runner ({0}.{1}.{2})"; + private const string _shTemplate = "darwin.svc.sh.template"; + private const string _svcShName = "svc.sh"; + + public void GenerateScripts(RunnerSettings settings) + { + Trace.Entering(); + + string serviceName; + string serviceDisplayName; + CalculateServiceName(settings, _svcNamePattern, _svcDisplayPattern, out serviceName, out serviceDisplayName); + + try + { + string svcShPath = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), _svcShName); + + // TODO: encoding? + // TODO: Loc strings formatted into MSG_xxx vars in shellscript + string svcShContent = File.ReadAllText(Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), _shTemplate)); + var tokensToReplace = new Dictionary + { + { "{{SvcDescription}}", serviceDisplayName }, + { "{{SvcNameVar}}", serviceName } + }; + + svcShContent = tokensToReplace.Aggregate( + svcShContent, + (current, item) => current.Replace(item.Key, item.Value)); + + //TODO: encoding? 
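+ // Write the generated svc.sh wrapper into the runner root and mark it executable (chmod 755) below.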
+ File.WriteAllText(svcShPath, svcShContent); + + var unixUtil = HostContext.CreateService(); + unixUtil.ChmodAsync("755", svcShPath).GetAwaiter().GetResult(); + } + catch (Exception e) + { + Trace.Error(e); + throw; + } + } + } +} +#endif diff --git a/src/Runner.Listener/Configuration/PromptManager.cs b/src/Runner.Listener/Configuration/PromptManager.cs new file mode 100644 index 00000000000..977786c231d --- /dev/null +++ b/src/Runner.Listener/Configuration/PromptManager.cs @@ -0,0 +1,117 @@ +using GitHub.Runner.Common; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System; + +namespace GitHub.Runner.Listener.Configuration +{ + [ServiceLocator(Default = typeof(PromptManager))] + public interface IPromptManager : IRunnerService + { + bool ReadBool( + string argName, + string description, + bool defaultValue, + bool unattended); + + string ReadValue( + string argName, + string description, + bool secret, + string defaultValue, + Func validator, + bool unattended); + } + + public sealed class PromptManager : RunnerService, IPromptManager + { + private ITerminal _terminal; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + _terminal = HostContext.GetService(); + } + + public bool ReadBool( + string argName, + string description, + bool defaultValue, + bool unattended) + { + string answer = ReadValue( + argName: argName, + description: description, + secret: false, + defaultValue: defaultValue ? "Y" : "N", + validator: Validators.BoolValidator, + unattended: unattended); + return String.Equals(answer, "true", StringComparison.OrdinalIgnoreCase) || + String.Equals(answer, "Y", StringComparison.CurrentCultureIgnoreCase); + } + + public string ReadValue( + string argName, + string description, + bool secret, + string defaultValue, + Func validator, + bool unattended) + { + Trace.Info(nameof(ReadValue)); + ArgUtil.NotNull(validator, nameof(validator)); + string value = string.Empty; + + // Check if unattended. + if (unattended) + { + // Return the default value if specified. + if (!string.IsNullOrEmpty(defaultValue)) + { + return defaultValue; + } + + // Otherwise throw. + throw new Exception($"Invalid configuration provided for {argName}. Terminating unattended configuration."); + } + + // Prompt until a valid value is read. + while (true) + { + // Write the message prompt. + _terminal.Write($"{description} ", ConsoleColor.White); + + if(!string.IsNullOrEmpty(defaultValue)) + { + _terminal.Write($"[press Enter for {defaultValue}] "); + } + + // Read and trim the value. + value = secret ? _terminal.ReadSecret() : _terminal.ReadLine(); + value = value?.Trim() ?? string.Empty; + + // Return the default if not specified. + if (string.IsNullOrEmpty(value) && !string.IsNullOrEmpty(defaultValue)) + { + Trace.Info($"Falling back to the default: '{defaultValue}'"); + return defaultValue; + } + + // Return the value if it is not empty and it is valid. + // Otherwise try the loop again. 
+ if (!string.IsNullOrEmpty(value)) + { + if (validator(value)) + { + return value; + } + else + { + Trace.Info("Invalid value."); + _terminal.WriteLine("Entered value is invalid", ConsoleColor.Yellow); + } + } + } + } + } +} diff --git a/src/Runner.Listener/Configuration/RSAEncryptedFileKeyManager.cs b/src/Runner.Listener/Configuration/RSAEncryptedFileKeyManager.cs new file mode 100644 index 00000000000..8401ac09de0 --- /dev/null +++ b/src/Runner.Listener/Configuration/RSAEncryptedFileKeyManager.cs @@ -0,0 +1,87 @@ +#if OS_WINDOWS +using System.IO; +using System.Security.Cryptography; +using System.Text; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Listener.Configuration +{ + public class RSAEncryptedFileKeyManager : RunnerService, IRSAKeyManager + { + private string _keyFile; + private IHostContext _context; + + public RSACryptoServiceProvider CreateKey() + { + RSACryptoServiceProvider rsa = null; + if (!File.Exists(_keyFile)) + { + Trace.Info("Creating new RSA key using 2048-bit key length"); + + rsa = new RSACryptoServiceProvider(2048); + + // Now write the parameters to disk + SaveParameters(rsa.ExportParameters(true)); + Trace.Info("Successfully saved RSA key parameters to file {0}", _keyFile); + } + else + { + Trace.Info("Found existing RSA key parameters file {0}", _keyFile); + + rsa = new RSACryptoServiceProvider(); + rsa.ImportParameters(LoadParameters()); + } + + return rsa; + } + + public void DeleteKey() + { + if (File.Exists(_keyFile)) + { + Trace.Info("Deleting RSA key parameters file {0}", _keyFile); + File.Delete(_keyFile); + } + } + + public RSACryptoServiceProvider GetKey() + { + if (!File.Exists(_keyFile)) + { + throw new CryptographicException($"RSA key file {_keyFile} was not found"); + } + + Trace.Info("Loading RSA key parameters from file {0}", _keyFile); + + var rsa = new RSACryptoServiceProvider(); + rsa.ImportParameters(LoadParameters()); + return rsa; + } + + private RSAParameters LoadParameters() + { + var encryptedBytes = File.ReadAllBytes(_keyFile); + var parametersString = Encoding.UTF8.GetString(ProtectedData.Unprotect(encryptedBytes, null, DataProtectionScope.LocalMachine)); + return StringUtil.ConvertFromJson(parametersString).RSAParameters; + } + + private void SaveParameters(RSAParameters parameters) + { + var parametersString = StringUtil.ConvertToJson(new RSAParametersSerializable(parameters)); + var encryptedBytes = ProtectedData.Protect(Encoding.UTF8.GetBytes(parametersString), null, DataProtectionScope.LocalMachine); + File.WriteAllBytes(_keyFile, encryptedBytes); + File.SetAttributes(_keyFile, File.GetAttributes(_keyFile) | FileAttributes.Hidden); + } + + void IRunnerService.Initialize(IHostContext context) + { + base.Initialize(context); + + _context = context; + _keyFile = context.GetConfigFile(WellKnownConfigFile.RSACredentials); + } + } +} +#endif diff --git a/src/Runner.Listener/Configuration/RSAFileKeyManager.cs b/src/Runner.Listener/Configuration/RSAFileKeyManager.cs new file mode 100644 index 00000000000..37406b1a2f6 --- /dev/null +++ b/src/Runner.Listener/Configuration/RSAFileKeyManager.cs @@ -0,0 +1,97 @@ +#if OS_LINUX || OS_OSX +using System; +using System.IO; +using System.Security.Cryptography; +using System.Threading; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Listener.Configuration +{ + public class RSAFileKeyManager : RunnerService, IRSAKeyManager + { + private string _keyFile; + private 
IHostContext _context; + + public RSACryptoServiceProvider CreateKey() + { + RSACryptoServiceProvider rsa = null; + if (!File.Exists(_keyFile)) + { + Trace.Info("Creating new RSA key using 2048-bit key length"); + + rsa = new RSACryptoServiceProvider(2048); + + // Now write the parameters to disk + IOUtil.SaveObject(new RSAParametersSerializable(rsa.ExportParameters(true)), _keyFile); + Trace.Info("Successfully saved RSA key parameters to file {0}", _keyFile); + + // Try to lock down the credentials_key file to the owner/group + var chmodPath = WhichUtil.Which("chmod", trace: Trace); + if (!String.IsNullOrEmpty(chmodPath)) + { + var arguments = $"600 {new FileInfo(_keyFile).FullName}"; + using (var invoker = _context.CreateService()) + { + var exitCode = invoker.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), chmodPath, arguments, null, default(CancellationToken)).GetAwaiter().GetResult(); + if (exitCode == 0) + { + Trace.Info("Successfully set permissions for RSA key parameters file {0}", _keyFile); + } + else + { + Trace.Warning("Unable to succesfully set permissions for RSA key parameters file {0}. Received exit code {1} from {2}", _keyFile, exitCode, chmodPath); + } + } + } + else + { + Trace.Warning("Unable to locate chmod to set permissions for RSA key parameters file {0}.", _keyFile); + } + } + else + { + Trace.Info("Found existing RSA key parameters file {0}", _keyFile); + + rsa = new RSACryptoServiceProvider(); + rsa.ImportParameters(IOUtil.LoadObject(_keyFile).RSAParameters); + } + + return rsa; + } + + public void DeleteKey() + { + if (File.Exists(_keyFile)) + { + Trace.Info("Deleting RSA key parameters file {0}", _keyFile); + File.Delete(_keyFile); + } + } + + public RSACryptoServiceProvider GetKey() + { + if (!File.Exists(_keyFile)) + { + throw new CryptographicException($"RSA key file {_keyFile} was not found"); + } + + Trace.Info("Loading RSA key parameters from file {0}", _keyFile); + + var parameters = IOUtil.LoadObject(_keyFile).RSAParameters; + var rsa = new RSACryptoServiceProvider(); + rsa.ImportParameters(parameters); + return rsa; + } + + void IRunnerService.Initialize(IHostContext context) + { + base.Initialize(context); + + _context = context; + _keyFile = context.GetConfigFile(WellKnownConfigFile.RSACredentials); + } + } +} +#endif diff --git a/src/Runner.Listener/Configuration/ServiceControlManager.cs b/src/Runner.Listener/Configuration/ServiceControlManager.cs new file mode 100644 index 00000000000..e396ba4e051 --- /dev/null +++ b/src/Runner.Listener/Configuration/ServiceControlManager.cs @@ -0,0 +1,63 @@ +using System; +using System.Linq; +using GitHub.Runner.Common; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Listener.Configuration +{ +#if OS_WINDOWS + [ServiceLocator(Default = typeof(WindowsServiceControlManager))] + public interface IWindowsServiceControlManager : IRunnerService + { + void ConfigureService(RunnerSettings settings, CommandSettings command); + + void UnconfigureService(); + } +#endif + +#if !OS_WINDOWS + +#if OS_LINUX + [ServiceLocator(Default = typeof(SystemDControlManager))] +#elif OS_OSX + [ServiceLocator(Default = typeof(OsxServiceControlManager))] +#endif + public interface ILinuxServiceControlManager : IRunnerService + { + void GenerateScripts(RunnerSettings settings); + } +#endif + + public class ServiceControlManager : RunnerService + { + public void CalculateServiceName(RunnerSettings settings, string serviceNamePattern, string serviceDisplayNamePattern, out string 
serviceName, out string serviceDisplayName) + { + Trace.Entering(); + serviceName = string.Empty; + serviceDisplayName = string.Empty; + + Uri accountUri = new Uri(settings.ServerUrl); + string accountName = string.Empty; + + if (accountUri.Host.EndsWith(".githubusercontent.com", StringComparison.OrdinalIgnoreCase)) + { + accountName = accountUri.AbsolutePath.Split('/', StringSplitOptions.RemoveEmptyEntries).FirstOrDefault(); + } + else + { + accountName = accountUri.Host.Split('.').FirstOrDefault(); + } + + if (string.IsNullOrEmpty(accountName)) + { + throw new InvalidOperationException($"Cannot find GitHub organization name from server url: '{settings.ServerUrl}'"); + } + + serviceName = StringUtil.Format(serviceNamePattern, accountName, settings.PoolName, settings.AgentName); + serviceDisplayName = StringUtil.Format(serviceDisplayNamePattern, accountName, settings.PoolName, settings.AgentName); + + Trace.Info($"Service name '{serviceName}' display name '{serviceDisplayName}' will be used for service configuration."); + } + } +} diff --git a/src/Runner.Listener/Configuration/SystemdControlManager.cs b/src/Runner.Listener/Configuration/SystemdControlManager.cs new file mode 100644 index 00000000000..28bd89d92ec --- /dev/null +++ b/src/Runner.Listener/Configuration/SystemdControlManager.cs @@ -0,0 +1,55 @@ +#if OS_LINUX +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Listener.Configuration +{ + public class SystemDControlManager : ServiceControlManager, ILinuxServiceControlManager + { + // This is the name you would see when you do `systemctl list-units | grep runner` + private const string _svcNamePattern = "actions.runner.{0}.{1}.{2}.service"; + private const string _svcDisplayPattern = "GitHub Actions Runner ({0}.{1}.{2})"; + private const string _shTemplate = "systemd.svc.sh.template"; + private const string _shName = "svc.sh"; + + public void GenerateScripts(RunnerSettings settings) + { + try + { + string serviceName; + string serviceDisplayName; + CalculateServiceName(settings, _svcNamePattern, _svcDisplayPattern, out serviceName, out serviceDisplayName); + + string svcShPath = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), _shName); + + string svcShContent = File.ReadAllText(Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), _shTemplate)); + var tokensToReplace = new Dictionary + { + { "{{SvcDescription}}", serviceDisplayName }, + { "{{SvcNameVar}}", serviceName } + }; + + svcShContent = tokensToReplace.Aggregate( + svcShContent, + (current, item) => current.Replace(item.Key, item.Value)); + + File.WriteAllText(svcShPath, svcShContent, new UTF8Encoding(false)); + + var unixUtil = HostContext.CreateService(); + unixUtil.ChmodAsync("755", svcShPath).GetAwaiter().GetResult(); + } + catch (Exception ex) + { + Trace.Error(ex); + throw; + } + } + } +} +#endif diff --git a/src/Runner.Listener/Configuration/Validators.cs b/src/Runner.Listener/Configuration/Validators.cs new file mode 100644 index 00000000000..aa17717c12e --- /dev/null +++ b/src/Runner.Listener/Configuration/Validators.cs @@ -0,0 +1,94 @@ +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System; +using System.IO; +using System.Security.Principal; + +namespace GitHub.Runner.Listener.Configuration +{ + public static class Validators + { + private static String UriHttpScheme = "http"; + private static String 
UriHttpsScheme = "https"; + + public static bool ServerUrlValidator(string value) + { + try + { + Uri uri; + if (Uri.TryCreate(value, UriKind.Absolute, out uri)) + { + if (uri.Scheme.Equals(UriHttpScheme, StringComparison.OrdinalIgnoreCase) + || uri.Scheme.Equals(UriHttpsScheme, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + } + catch (Exception) + { + return false; + } + + return false; + } + + public static bool AuthSchemeValidator(string value) + { + return CredentialManager.CredentialTypes.ContainsKey(value); + } + + public static bool FilePathValidator(string value) + { + var directoryInfo = new DirectoryInfo(value); + + if (!directoryInfo.Exists) + { + try + { + Directory.CreateDirectory(value); + } + catch (Exception) + { + return false; + } + } + + return true; + } + + public static bool BoolValidator(string value) + { + return string.Equals(value, "true", StringComparison.OrdinalIgnoreCase) || + string.Equals(value, "false", StringComparison.OrdinalIgnoreCase) || + string.Equals(value, "Y", StringComparison.CurrentCultureIgnoreCase) || + string.Equals(value, "N", StringComparison.CurrentCultureIgnoreCase); + } + + public static bool NonEmptyValidator(string value) + { + return !string.IsNullOrEmpty(value); + } + + public static bool NTAccountValidator(string arg) + { + if (string.IsNullOrEmpty(arg) || String.IsNullOrEmpty(arg.TrimStart('.', '\\'))) + { + return false; + } + + try + { + var logonAccount = arg.TrimStart('.'); + NTAccount ntaccount = new NTAccount(logonAccount); + SecurityIdentifier sid = (SecurityIdentifier)ntaccount.Translate(typeof(SecurityIdentifier)); + } + catch (IdentityNotMappedException) + { + return false; + } + + return true; + } + } +} diff --git a/src/Runner.Listener/Configuration/WindowsServiceControlManager.cs b/src/Runner.Listener/Configuration/WindowsServiceControlManager.cs new file mode 100644 index 00000000000..ab4dbf3eae1 --- /dev/null +++ b/src/Runner.Listener/Configuration/WindowsServiceControlManager.cs @@ -0,0 +1,172 @@ +#if OS_WINDOWS +using System; +using System.IO; +using System.Linq; +using System.Security; +using System.Security.Principal; +using System.Text; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Listener.Configuration +{ + public class WindowsServiceControlManager : ServiceControlManager, IWindowsServiceControlManager + { + public const string WindowsServiceControllerName = "RunnerService.exe"; + + private const string ServiceNamePattern = "actionsrunner.{0}.{1}.{2}"; + private const string ServiceDisplayNamePattern = "GitHub Actions Runner ({0}.{1}.{2})"; + + private INativeWindowsServiceHelper _windowsServiceHelper; + private ITerminal _term; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + _windowsServiceHelper = HostContext.GetService(); + _term = HostContext.GetService(); + } + + public void ConfigureService(RunnerSettings settings, CommandSettings command) + { + Trace.Entering(); + + if (!_windowsServiceHelper.IsRunningInElevatedMode()) + { + Trace.Error("Needs Administrator privileges for configure runner as windows service."); + throw new SecurityException("Needs Administrator privileges for configuring runner as windows service."); + } + + // We use NetworkService as default account for actions runner + NTAccount defaultServiceAccount = _windowsServiceHelper.GetDefaultServiceAccount(); + string logonAccount = command.GetWindowsLogonAccount(defaultValue: 
defaultServiceAccount.ToString(), descriptionMsg: "User account to use for the service"); + + string domainName; + string userName; + GetAccountSegments(logonAccount, out domainName, out userName); + + if ((string.IsNullOrEmpty(domainName) || domainName.Equals(".", StringComparison.CurrentCultureIgnoreCase)) && !logonAccount.Contains('@')) + { + logonAccount = String.Format("{0}\\{1}", Environment.MachineName, userName); + domainName = Environment.MachineName; + } + + Trace.Info("LogonAccount after transforming: {0}, user: {1}, domain: {2}", logonAccount, userName, domainName); + + string logonPassword = string.Empty; + if (!defaultServiceAccount.Equals(new NTAccount(logonAccount)) && !NativeWindowsServiceHelper.IsWellKnownIdentity(logonAccount)) + { + while (true) + { + logonPassword = command.GetWindowsLogonPassword(logonAccount); + if (_windowsServiceHelper.IsValidCredential(domainName, userName, logonPassword)) + { + Trace.Info("Credential validation succeed"); + break; + } + else + { + if (!command.Unattended) + { + Trace.Info("Invalid credential entered"); + _term.WriteLine("Invalid windows credentials entered. Try again or ctrl-c to quit"); + } + else + { + throw new SecurityException("Invalid windows credentials entered. Try again or ctrl-c to quit"); + } + } + } + } + + string serviceName; + string serviceDisplayName; + CalculateServiceName(settings, ServiceNamePattern, ServiceDisplayNamePattern, out serviceName, out serviceDisplayName); + if (_windowsServiceHelper.IsServiceExists(serviceName)) + { + _term.WriteLine($"The service already exists: {serviceName}, it will be replaced"); + _windowsServiceHelper.UninstallService(serviceName); + } + + Trace.Info("Verifying if the account has LogonAsService permission"); + if (_windowsServiceHelper.IsUserHasLogonAsServicePrivilege(domainName, userName)) + { + Trace.Info($"Account: {logonAccount} already has Logon As Service Privilege."); + } + else + { + if (!_windowsServiceHelper.GrantUserLogonAsServicePrivilege(domainName, userName)) + { + throw new InvalidOperationException($"Cannot grant LogonAsService permission to the user {logonAccount}"); + } + } + + // grant permission for runner root folder and work folder + Trace.Info("Create local group and grant folder permission to service logon account."); + string runnerRoot = HostContext.GetDirectory(WellKnownDirectory.Root); + string workFolder = HostContext.GetDirectory(WellKnownDirectory.Work); + Directory.CreateDirectory(workFolder); + _windowsServiceHelper.GrantDirectoryPermissionForAccount(logonAccount, new[] { runnerRoot, workFolder }); + _term.WriteLine($"Granting file permissions to '{logonAccount}'."); + + // install service. + _windowsServiceHelper.InstallService(serviceName, serviceDisplayName, logonAccount, logonPassword); + + // create .service file with service name. 
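+ // UnconfigureService reads this file back later to know which Windows service to stop and uninstall.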
+ SaveServiceSettings(serviceName); + + Trace.Info("Configuration was successful, trying to start the service"); + _windowsServiceHelper.StartService(serviceName); + } + + public void UnconfigureService() + { + if (!_windowsServiceHelper.IsRunningInElevatedMode()) + { + Trace.Error("Needs Administrator privileges for unconfigure windows service runner."); + throw new SecurityException("Needs Administrator privileges for unconfiguring runner that running as windows service."); + } + + string serviceConfigPath = HostContext.GetConfigFile(WellKnownConfigFile.Service); + string serviceName = File.ReadAllText(serviceConfigPath); + if (_windowsServiceHelper.IsServiceExists(serviceName)) + { + _windowsServiceHelper.StopService(serviceName); + _windowsServiceHelper.UninstallService(serviceName); + + // Delete local group we created during configure. + string runnerRoot = HostContext.GetDirectory(WellKnownDirectory.Root); + string workFolder = HostContext.GetDirectory(WellKnownDirectory.Work); + _windowsServiceHelper.RevokeDirectoryPermissionForAccount(new[] { runnerRoot, workFolder }); + } + + IOUtil.DeleteFile(serviceConfigPath); + } + + private void SaveServiceSettings(string serviceName) + { + string serviceConfigPath = HostContext.GetConfigFile(WellKnownConfigFile.Service); + if (File.Exists(serviceConfigPath)) + { + IOUtil.DeleteFile(serviceConfigPath); + } + + File.WriteAllText(serviceConfigPath, serviceName, new UTF8Encoding(false)); + File.SetAttributes(serviceConfigPath, File.GetAttributes(serviceConfigPath) | FileAttributes.Hidden); + } + + private void GetAccountSegments(string account, out string domain, out string user) + { + string[] segments = account.Split('\\'); + domain = string.Empty; + user = account; + if (segments.Length == 2) + { + domain = segments[0]; + user = segments[1]; + } + } + } +} +#endif diff --git a/src/Runner.Listener/JobDispatcher.cs b/src/Runner.Listener/JobDispatcher.cs new file mode 100644 index 00000000000..74fec853017 --- /dev/null +++ b/src/Runner.Listener/JobDispatcher.cs @@ -0,0 +1,909 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using GitHub.Services.WebApi; +using Pipelines = GitHub.DistributedTask.Pipelines; +using System.Linq; +using GitHub.Services.Common; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Listener +{ + [ServiceLocator(Default = typeof(JobDispatcher))] + public interface IJobDispatcher : IRunnerService + { + TaskCompletionSource RunOnceJobCompleted { get; } + void Run(Pipelines.AgentJobRequestMessage message, bool runOnce = false); + bool Cancel(JobCancelMessage message); + Task WaitAsync(CancellationToken token); + TaskResult GetLocalRunJobResult(AgentJobRequestMessage message); + Task ShutdownAsync(); + } + + // This implementation of IDobDispatcher is not thread safe. + // It is base on the fact that the current design of runner is dequeue + // and process one message from message queue everytime. + // In addition, it only execute one job every time, + // and server will not send another job while this one is still running. 
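+ // As a result, a new request first waits for (or cancels) the previously dispatched worker via EnsureDispatchFinished before the next job starts.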
+ public sealed class JobDispatcher : RunnerService, IJobDispatcher + { + private readonly Lazy> _localRunJobResult = new Lazy>(); + private int _poolId; + RunnerSettings _runnerSetting; + private static readonly string _workerProcessName = $"Runner.Worker{IOUtil.ExeExtension}"; + + // this is not thread-safe + private readonly Queue _jobDispatchedQueue = new Queue(); + private readonly ConcurrentDictionary _jobInfos = new ConcurrentDictionary(); + + //allow up to 30sec for any data to be transmitted over the process channel + //timeout limit can be overwrite by environment GITHUB_ACTIONS_RUNNER_CHANNEL_TIMEOUT + private TimeSpan _channelTimeout; + + private TaskCompletionSource _runOnceJobCompleted = new TaskCompletionSource(); + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + + // get pool id from config + var configurationStore = hostContext.GetService(); + _runnerSetting = configurationStore.GetSettings(); + _poolId = _runnerSetting.PoolId; + + int channelTimeoutSeconds; + if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_CHANNEL_TIMEOUT") ?? string.Empty, out channelTimeoutSeconds)) + { + channelTimeoutSeconds = 30; + } + + // _channelTimeout should in range [30, 300] seconds + _channelTimeout = TimeSpan.FromSeconds(Math.Min(Math.Max(channelTimeoutSeconds, 30), 300)); + Trace.Info($"Set runner/worker IPC timeout to {_channelTimeout.TotalSeconds} seconds."); + } + + public TaskCompletionSource RunOnceJobCompleted => _runOnceJobCompleted; + + public void Run(Pipelines.AgentJobRequestMessage jobRequestMessage, bool runOnce = false) + { + Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received."); + + WorkerDispatcher currentDispatch = null; + if (_jobDispatchedQueue.Count > 0) + { + Guid dispatchedJobId = _jobDispatchedQueue.Dequeue(); + if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch)) + { + Trace.Verbose($"Retrive previous WorkerDispather for job {currentDispatch.JobId}."); + } + } + + WorkerDispatcher newDispatch = new WorkerDispatcher(jobRequestMessage.JobId, jobRequestMessage.RequestId); + if (runOnce) + { + Trace.Info("Start dispatcher for one time used runner."); + newDispatch.WorkerDispatch = RunOnceAsync(jobRequestMessage, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token); + } + else + { + newDispatch.WorkerDispatch = RunAsync(jobRequestMessage, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token); + } + + _jobInfos.TryAdd(newDispatch.JobId, newDispatch); + _jobDispatchedQueue.Enqueue(newDispatch.JobId); + } + + public bool Cancel(JobCancelMessage jobCancelMessage) + { + Trace.Info($"Job cancellation request {jobCancelMessage.JobId} received, cancellation timeout {jobCancelMessage.Timeout.TotalMinutes} minutes."); + + WorkerDispatcher workerDispatcher; + if (!_jobInfos.TryGetValue(jobCancelMessage.JobId, out workerDispatcher)) + { + Trace.Verbose($"Job request {jobCancelMessage.JobId} is not a current running job, ignore cancllation request."); + return false; + } + else + { + if (workerDispatcher.Cancel(jobCancelMessage.Timeout)) + { + Trace.Verbose($"Fired cancellation token for job request {workerDispatcher.JobId}."); + } + + return true; + } + } + + public async Task WaitAsync(CancellationToken token) + { + WorkerDispatcher currentDispatch = null; + Guid dispatchedJobId; + 
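+ // Look up the most recently dispatched job, if any, so its worker task can be awaited.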
if (_jobDispatchedQueue.Count > 0)
+ {
+ dispatchedJobId = _jobDispatchedQueue.Dequeue();
+ if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch))
+ {
+ Trace.Verbose($"Retrieved previous WorkerDispatcher for job {currentDispatch.JobId}.");
+ }
+ }
+ else
+ {
+ Trace.Verbose($"There is no running WorkerDispatcher to await.");
+ }
+
+ if (currentDispatch != null)
+ {
+ using (var registration = token.Register(() => { if (currentDispatch.Cancel(TimeSpan.FromSeconds(60))) { Trace.Verbose($"Fired cancellation token for job request {currentDispatch.JobId}."); } }))
+ {
+ try
+ {
+ Trace.Info($"Waiting for WorkerDispatcher for job {currentDispatch.JobId} to finish.");
+ await currentDispatch.WorkerDispatch;
+ Trace.Info($"Job request {currentDispatch.JobId} processed successfully.");
+ }
+ catch (Exception ex)
+ {
+ Trace.Error($"Worker Dispatch failed with an exception for job request {currentDispatch.JobId}.");
+ Trace.Error(ex);
+ }
+ finally
+ {
+ WorkerDispatcher workerDispatcher;
+ if (_jobInfos.TryRemove(currentDispatch.JobId, out workerDispatcher))
+ {
+ Trace.Verbose($"Removed WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
+ workerDispatcher.Dispose();
+ }
+ }
+ }
+ }
+ }
+
+ public TaskResult GetLocalRunJobResult(AgentJobRequestMessage message)
+ {
+ return _localRunJobResult.Value[message.RequestId];
+ }
+
+ public async Task ShutdownAsync()
+ {
+ Trace.Info($"Shutting down JobDispatcher. Make sure all WorkerDispatchers have finished.");
+ WorkerDispatcher currentDispatch = null;
+ if (_jobDispatchedQueue.Count > 0)
+ {
+ Guid dispatchedJobId = _jobDispatchedQueue.Dequeue();
+ if (_jobInfos.TryGetValue(dispatchedJobId, out currentDispatch))
+ {
+ try
+ {
+ Trace.Info($"Ensure WorkerDispatcher for job {currentDispatch.JobId} runs to finish, cancel any running job.");
+ await EnsureDispatchFinished(currentDispatch, cancelRunningJob: true);
+ }
+ catch (Exception ex)
+ {
+ Trace.Error($"Caught worker dispatch exception for job request {currentDispatch.JobId} during job dispatcher shutdown.");
+ Trace.Error(ex);
+ }
+ finally
+ {
+ WorkerDispatcher workerDispatcher;
+ if (_jobInfos.TryRemove(currentDispatch.JobId, out workerDispatcher))
+ {
+ Trace.Verbose($"Removed WorkerDispatcher from {nameof(_jobInfos)} dictionary for job {currentDispatch.JobId}.");
+ workerDispatcher.Dispose();
+ }
+ }
+ }
+ }
+ }
+
+ private async Task EnsureDispatchFinished(WorkerDispatcher jobDispatch, bool cancelRunningJob = false)
+ {
+ if (!jobDispatch.WorkerDispatch.IsCompleted)
+ {
+ if (cancelRunningJob)
+ {
+ // Cancel the running job when shutting down the runner.
+ // This happens when the runner gets Ctrl+C or the message queue loop crashed.
+ jobDispatch.WorkerCancellationTokenSource.Cancel();
+ // Wait for the worker process to exit, then return.
+ await jobDispatch.WorkerDispatch;
+
+ return;
+ }
+
+ // Based on the current design, the server only sends one job to a given runner at a time.
+ // If the runner receives a new job request while a previous job request is still running, this typically indicates one of two situations:
+ // 1. a runner bug caused the server and runner to disagree on the state of the job request, e.g. the runner failed to renew the job request properly but thinks it still owns it, while the server has already abandoned it.
+ // 2. a server bug or design change that allows the server to send more than one job request to a given runner that hasn't finished the previous job request.
+ var runnerServer = HostContext.GetService<IRunnerServer>();
+ TaskAgentJobRequest request = null;
+ try
+ {
+ request = await runnerServer.GetAgentRequestAsync(_poolId, jobDispatch.RequestId, CancellationToken.None);
+ }
+ catch (Exception ex)
+ {
+ // we can't even query the server for the job request; something is totally busted, stop the runner/worker.
+ Trace.Error($"Caught exception while checking job request {jobDispatch.JobId} status. Cancel running worker right away.");
+ Trace.Error(ex);
+
+ jobDispatch.WorkerCancellationTokenSource.Cancel();
+ // make sure the worker process exits before we rethrow, otherwise we might leave an orphan worker process behind.
+ await jobDispatch.WorkerDispatch;
+
+ // rethrow original exception
+ throw;
+ }
+
+ if (request.Result != null)
+ {
+ // the job request has already finished and the server has its result.
+ // this means the runner is busted, since it is still running that request.
+ // cancel the zombie worker, run the next job request.
+ Trace.Error($"Received job request while previous job {jobDispatch.JobId} still running on worker. Cancel the previous job since the job request has already finished on the server side with result: {request.Result.Value}.");
+ jobDispatch.WorkerCancellationTokenSource.Cancel();
+
+ // wait 45 sec for the worker to finish.
+ Task completedTask = await Task.WhenAny(jobDispatch.WorkerDispatch, Task.Delay(TimeSpan.FromSeconds(45)));
+ if (completedTask != jobDispatch.WorkerDispatch)
+ {
+ // at this point, the job execution might have hit a deadlock and cannot even be cancelled.
+ // no need to localize the exception string; this should never happen.
+ throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered an unexpected error; the dispatch task could not be canceled within 45 seconds.");
+ }
+ }
+ else
+ {
+ // something is seriously wrong on the server side; stop the runner from continuing to run.
+ // no need to localize the exception string; this should never happen.
+ throw new InvalidOperationException($"Server send a new job request while the previous job request {jobDispatch.JobId} haven't finished."); + } + } + + try + { + await jobDispatch.WorkerDispatch; + Trace.Info($"Job request {jobDispatch.JobId} processed succeed."); + } + catch (Exception ex) + { + Trace.Error($"Worker Dispatch failed with an exception for job request {jobDispatch.JobId}."); + Trace.Error(ex); + } + finally + { + WorkerDispatcher workerDispatcher; + if (_jobInfos.TryRemove(jobDispatch.JobId, out workerDispatcher)) + { + Trace.Verbose($"Remove WorkerDispather from {nameof(_jobInfos)} dictionary for job {jobDispatch.JobId}."); + workerDispatcher.Dispose(); + } + } + } + + private async Task RunOnceAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken) + { + try + { + await RunAsync(message, previousJobDispatch, jobRequestCancellationToken, workerCancelTimeoutKillToken); + } + finally + { + Trace.Info("Fire signal for one time used runner."); + _runOnceJobCompleted.TrySetResult(true); + } + } + + private async Task RunAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken) + { + if (previousJobDispatch != null) + { + Trace.Verbose($"Make sure the previous job request {previousJobDispatch.JobId} has successfully finished on worker."); + await EnsureDispatchFinished(previousJobDispatch); + } + else + { + Trace.Verbose($"This is the first job request."); + } + + var term = HostContext.GetService(); + term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}"); + + // first job request renew succeed. + TaskCompletionSource firstJobRequestRenewed = new TaskCompletionSource(); + var notification = HostContext.GetService(); + + // lock renew cancellation token. + using (var lockRenewalTokenSource = new CancellationTokenSource()) + using (var workerProcessCancelTokenSource = new CancellationTokenSource()) + { + long requestId = message.RequestId; + Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat + + // start renew job request + Trace.Info($"Start renew job request {requestId} for job {message.JobId}."); + Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, firstJobRequestRenewed, lockRenewalTokenSource.Token); + + // wait till first renew succeed or job request is canceled + // not even start worker if the first renew fail + await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken)); + + if (renewJobRequest.IsCompleted) + { + // renew job request task complete means we run out of retry for the first job request renew. + Trace.Info($"Unable to renew job request for job {message.JobId} for the first time, stop dispatching job to worker."); + return; + } + + if (jobRequestCancellationToken.IsCancellationRequested) + { + Trace.Info($"Stop renew job request for job {message.JobId}."); + // stop renew lock + lockRenewalTokenSource.Cancel(); + // renew job request should never blows up. 
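+ // (RenewJobRequestAsync catches and logs its own failures, so awaiting it here is expected not to throw.)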
+ await renewJobRequest; + + // complete job request with result Cancelled + await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled); + return; + } + + HostContext.WritePerfCounter($"JobRequestRenewed_{requestId.ToString()}"); + + Task workerProcessTask = null; + object _outputLock = new object(); + List workerOutput = new List(); + using (var processChannel = HostContext.CreateService()) + using (var processInvoker = HostContext.CreateService()) + { + // Start the process channel. + // It's OK if StartServer bubbles an execption after the worker process has already started. + // The worker will shutdown after 30 seconds if it hasn't received the job message. + processChannel.StartServer( + // Delegate to start the child process. + startProcess: (string pipeHandleOut, string pipeHandleIn) => + { + // Validate args. + ArgUtil.NotNullOrEmpty(pipeHandleOut, nameof(pipeHandleOut)); + ArgUtil.NotNullOrEmpty(pipeHandleIn, nameof(pipeHandleIn)); + + if (HostContext.RunMode == RunMode.Normal) + { + // Save STDOUT from worker, worker will use STDOUT report unhandle exception. + processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout) + { + if (!string.IsNullOrEmpty(stdout.Data)) + { + lock (_outputLock) + { + workerOutput.Add(stdout.Data); + } + } + }; + + // Save STDERR from worker, worker will use STDERR on crash. + processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr) + { + if (!string.IsNullOrEmpty(stderr.Data)) + { + lock (_outputLock) + { + workerOutput.Add(stderr.Data); + } + } + }; + } + else if (HostContext.RunMode == RunMode.Local) + { + processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs e) => Console.WriteLine(e.Data); + processInvoker.ErrorDataReceived += (object sender, ProcessDataReceivedEventArgs e) => Console.WriteLine(e.Data); + } + + // Start the child process. + HostContext.WritePerfCounter("StartingWorkerProcess"); + var assemblyDirectory = HostContext.GetDirectory(WellKnownDirectory.Bin); + string workerFileName = Path.Combine(assemblyDirectory, _workerProcessName); + workerProcessTask = processInvoker.ExecuteAsync( + workingDirectory: assemblyDirectory, + fileName: workerFileName, + arguments: "spawnclient " + pipeHandleOut + " " + pipeHandleIn, + environment: null, + requireExitCodeZero: false, + outputEncoding: null, + killProcessOnCancel: true, + redirectStandardIn: null, + inheritConsoleHandler: false, + keepStandardInOpen: false, + highPriorityProcess: true, + cancellationToken: workerProcessCancelTokenSource.Token); + }); + + // Send the job request message. + // Kill the worker process if sending the job message times out. The worker + // process may have successfully received the job message. + try + { + Trace.Info($"Send job request message to worker for job {message.JobId}."); + HostContext.WritePerfCounter($"RunnerSendingJobToWorker_{message.JobId}"); + using (var csSendJobRequest = new CancellationTokenSource(_channelTimeout)) + { + await processChannel.SendAsync( + messageType: MessageType.NewJobRequest, + body: JsonUtility.ToString(message), + cancellationToken: csSendJobRequest.Token); + } + } + catch (OperationCanceledException) + { + // message send been cancelled. + // timeout 30 sec. kill worker. 
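+ // the send above is bounded by _channelTimeout (30 to 300 seconds, see Initialize), so hitting this catch means the worker never acknowledged the job message.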
+ Trace.Info($"Job request message sending for job {message.JobId} been cancelled, kill running worker."); + workerProcessCancelTokenSource.Cancel(); + try + { + await workerProcessTask; + } + catch (OperationCanceledException) + { + Trace.Info("worker process has been killed."); + } + + Trace.Info($"Stop renew job request for job {message.JobId}."); + // stop renew lock + lockRenewalTokenSource.Cancel(); + // renew job request should never blows up. + await renewJobRequest; + + // not finish the job request since the job haven't run on worker at all, we will not going to set a result to server. + return; + } + + // we get first jobrequest renew succeed and start the worker process with the job message. + // send notification to machine provisioner. + var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); + var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"]; + await notification.JobStarted(message.JobId, accessToken, systemConnection.Url); + + HostContext.WritePerfCounter($"SentJobToWorker_{requestId.ToString()}"); + + try + { + TaskResult resultOnAbandonOrCancel = TaskResult.Succeeded; + // wait for renewlock, worker process or cancellation token been fired. + var completedTask = await Task.WhenAny(renewJobRequest, workerProcessTask, Task.Delay(-1, jobRequestCancellationToken)); + if (completedTask == workerProcessTask) + { + // worker finished successfully, complete job request with result, attach unhandled exception reported by worker, stop renew lock, job has finished. + int returnCode = await workerProcessTask; + Trace.Info($"Worker finished for job {message.JobId}. Code: " + returnCode); + + string detailInfo = null; + if (!TaskResultUtil.IsValidReturnCode(returnCode)) + { + detailInfo = string.Join(Environment.NewLine, workerOutput); + Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result."); + await LogWorkerProcessUnhandledException(message, detailInfo); + } + + TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode); + Trace.Info($"finish job request for job {message.JobId} with result: {result}"); + term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {result}"); + + Trace.Info($"Stop renew job request for job {message.JobId}."); + // stop renew lock + lockRenewalTokenSource.Cancel(); + // renew job request should never blows up. + await renewJobRequest; + + // complete job request + await CompleteJobRequestAsync(_poolId, message, lockToken, result, detailInfo); + + // print out unhandled exception happened in worker after we complete job request. + // when we run out of disk space, report back to server has higher priority. 
+ if (!string.IsNullOrEmpty(detailInfo)) + { + Trace.Error("Unhandled exception happened in worker:"); + Trace.Error(detailInfo); + } + + return; + } + else if (completedTask == renewJobRequest) + { + resultOnAbandonOrCancel = TaskResult.Abandoned; + } + else + { + resultOnAbandonOrCancel = TaskResult.Canceled; + } + + // renew job request completed or job request cancellation token been fired for RunAsync(jobrequestmessage) + // cancel worker gracefully first, then kill it after worker cancel timeout + try + { + Trace.Info($"Send job cancellation message to worker for job {message.JobId}."); + using (var csSendCancel = new CancellationTokenSource(_channelTimeout)) + { + var messageType = MessageType.CancelRequest; + if (HostContext.RunnerShutdownToken.IsCancellationRequested) + { + switch (HostContext.RunnerShutdownReason) + { + case ShutdownReason.UserCancelled: + messageType = MessageType.RunnerShutdown; + break; + case ShutdownReason.OperatingSystemShutdown: + messageType = MessageType.OperatingSystemShutdown; + break; + } + } + + await processChannel.SendAsync( + messageType: messageType, + body: string.Empty, + cancellationToken: csSendCancel.Token); + } + } + catch (OperationCanceledException) + { + // message send been cancelled. + Trace.Info($"Job cancel message sending for job {message.JobId} been cancelled, kill running worker."); + workerProcessCancelTokenSource.Cancel(); + try + { + await workerProcessTask; + } + catch (OperationCanceledException) + { + Trace.Info("worker process has been killed."); + } + } + + // wait worker to exit + // if worker doesn't exit within timeout, then kill worker. + completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken)); + + // worker haven't exit within cancellation timeout. + if (completedTask != workerProcessTask) + { + Trace.Info($"worker process for job {message.JobId} haven't exit within cancellation timout, kill running worker."); + workerProcessCancelTokenSource.Cancel(); + try + { + await workerProcessTask; + } + catch (OperationCanceledException) + { + Trace.Info("worker process has been killed."); + } + } + + Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}"); + term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {resultOnAbandonOrCancel}"); + // complete job request with cancel result, stop renew lock, job has finished. + + Trace.Info($"Stop renew job request for job {message.JobId}."); + // stop renew lock + lockRenewalTokenSource.Cancel(); + // renew job request should never blows up. + await renewJobRequest; + + // complete job request + await CompleteJobRequestAsync(_poolId, message, lockToken, resultOnAbandonOrCancel); + } + finally + { + // This should be the last thing to run so we don't notify external parties until actually finished + await notification.JobCompleted(message.JobId); + } + } + } + } + + public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, TaskCompletionSource firstJobRequestRenewed, CancellationToken token) + { + var runnerServer = HostContext.GetService(); + TaskAgentJobRequest request = null; + int firstRenewRetryLimit = 5; + int encounteringError = 0; + + // renew lock during job running. + // stop renew only if cancellation token for lock renew task been signal or exception still happen after retry. 
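+ // happy path: renew roughly every 60 seconds; on errors, retry with a jittered back-off and give up once the request's LockedUntil time (plus a 5 minute buffer) has passed.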
+ while (!token.IsCancellationRequested) + { + try + { + request = await runnerServer.RenewAgentRequestAsync(poolId, requestId, lockToken, token); + + Trace.Info($"Successfully renew job request {requestId}, job is valid till {request.LockedUntil.Value}"); + + if (!firstJobRequestRenewed.Task.IsCompleted) + { + // fire first renew succeed event. + firstJobRequestRenewed.TrySetResult(0); + } + + if (encounteringError > 0) + { + encounteringError = 0; + runnerServer.SetConnectionTimeout(RunnerConnectionType.JobRequest, TimeSpan.FromSeconds(60)); + HostContext.WritePerfCounter("JobRenewRecovered"); + } + + // renew again after 60 sec delay + await HostContext.Delay(TimeSpan.FromSeconds(60), token); + } + catch (TaskAgentJobNotFoundException) + { + // no need for retry. the job is not valid anymore. + Trace.Info($"TaskAgentJobNotFoundException received when renew job request {requestId}, job is no longer valid, stop renew job request."); + return; + } + catch (TaskAgentJobTokenExpiredException) + { + // no need for retry. the job is not valid anymore. + Trace.Info($"TaskAgentJobTokenExpiredException received renew job request {requestId}, job is no longer valid, stop renew job request."); + return; + } + catch (OperationCanceledException) when (token.IsCancellationRequested) + { + // OperationCanceledException may caused by http timeout or _lockRenewalTokenSource.Cance(); + // Stop renew only on cancellation token fired. + Trace.Info($"job renew has been canceled, stop renew job request {requestId}."); + return; + } + catch (Exception ex) + { + Trace.Error($"Catch exception during renew runner jobrequest {requestId}."); + Trace.Error(ex); + encounteringError++; + + // retry + TimeSpan remainingTime = TimeSpan.Zero; + if (!firstJobRequestRenewed.Task.IsCompleted) + { + // retry 5 times every 10 sec for the first renew + if (firstRenewRetryLimit-- > 0) + { + remainingTime = TimeSpan.FromSeconds(10); + } + } + else + { + // retry till reach lockeduntil + 5 mins extra buffer. + remainingTime = request.LockedUntil.Value + TimeSpan.FromMinutes(5) - DateTime.UtcNow; + } + + if (remainingTime > TimeSpan.Zero) + { + TimeSpan delayTime; + if (!firstJobRequestRenewed.Task.IsCompleted) + { + Trace.Info($"Retrying lock renewal for jobrequest {requestId}. The first job renew request has failed."); + delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10)); + } + else + { + Trace.Info($"Retrying lock renewal for jobrequest {requestId}. Job is valid until {request.LockedUntil.Value}."); + if (encounteringError > 5) + { + delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30)); + } + else + { + delayTime = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15)); + } + } + + // Re-establish connection to server in order to avoid affinity with server. + // Reduce connection timeout to 30 seconds (from 60s) + HostContext.WritePerfCounter("ResetJobRenewConnection"); + await runnerServer.RefreshConnectionAsync(RunnerConnectionType.JobRequest, TimeSpan.FromSeconds(30)); + + try + { + // back-off before next retry. 
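+ // the delay observes the renewal cancellation token, so stopping the renewal interrupts the back-off immediately.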
+ await HostContext.Delay(delayTime, token); + } + catch (OperationCanceledException) when (token.IsCancellationRequested) + { + Trace.Info($"job renew has been canceled, stop renew job request {requestId}."); + } + } + else + { + Trace.Info($"Lock renewal has run out of retry, stop renew lock for jobrequest {requestId}."); + HostContext.WritePerfCounter("JobRenewReachLimit"); + return; + } + } + } + } + + // TODO: We need send detailInfo back to DT in order to add an issue for the job + private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, Guid lockToken, TaskResult result, string detailInfo = null) + { + Trace.Entering(); + if (HostContext.RunMode == RunMode.Local) + { + _localRunJobResult.Value[message.RequestId] = result; + return; + } + + if (PlanUtil.GetFeatures(message.Plan).HasFlag(PlanFeatures.JobCompletedPlanEvent)) + { + Trace.Verbose($"Skip FinishAgentRequest call from Listener because Plan version is {message.Plan.Version}"); + return; + } + + var runnerServer = HostContext.GetService(); + int completeJobRequestRetryLimit = 5; + List exceptions = new List(); + while (completeJobRequestRetryLimit-- > 0) + { + try + { + await runnerServer.FinishAgentRequestAsync(poolId, message.RequestId, lockToken, DateTime.UtcNow, result, CancellationToken.None); + return; + } + catch (TaskAgentJobNotFoundException) + { + Trace.Info($"TaskAgentJobNotFoundException received, job {message.JobId} is no longer valid."); + return; + } + catch (TaskAgentJobTokenExpiredException) + { + Trace.Info($"TaskAgentJobTokenExpiredException received, job {message.JobId} is no longer valid."); + return; + } + catch (Exception ex) + { + Trace.Error($"Catch exception during complete runner jobrequest {message.RequestId}."); + Trace.Error(ex); + exceptions.Add(ex); + } + + // delay 5 seconds before next retry. + await Task.Delay(TimeSpan.FromSeconds(5)); + } + + // rethrow all catched exceptions during retry. 
+ throw new AggregateException(exceptions); + } + + // log an error issue to job level timeline record + private async Task LogWorkerProcessUnhandledException(Pipelines.AgentJobRequestMessage message, string errorMessage) + { + try + { + var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection)); + ArgUtil.NotNull(systemConnection, nameof(systemConnection)); + + var jobServer = HostContext.GetService(); + VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection); + Uri jobServerUrl = systemConnection.Url; + + // Make sure SystemConnection Url match Config Url base for OnPremises server + if (!message.Variables.ContainsKey(Constants.Variables.System.ServerType) || + string.Equals(message.Variables[Constants.Variables.System.ServerType]?.Value, "OnPremises", StringComparison.OrdinalIgnoreCase)) + { + try + { + Uri result = null; + Uri configUri = new Uri(_runnerSetting.ServerUrl); + if (Uri.TryCreate(new Uri(configUri.GetComponents(UriComponents.SchemeAndServer, UriFormat.Unescaped)), jobServerUrl.PathAndQuery, out result)) + { + //replace the schema and host portion of messageUri with the host from the + //server URI (which was set at config time) + jobServerUrl = result; + } + } + catch (InvalidOperationException ex) + { + //cannot parse the Uri - not a fatal error + Trace.Error(ex); + } + catch (UriFormatException ex) + { + //cannot parse the Uri - not a fatal error + Trace.Error(ex); + } + } + + VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential); + await jobServer.ConnectAsync(jobConnection); + var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None); + ArgUtil.NotNull(timeline, nameof(timeline)); + TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job"); + ArgUtil.NotNull(jobRecord, nameof(jobRecord)); + jobRecord.ErrorCount++; + jobRecord.Issues.Add(new Issue() { Type = IssueType.Error, Message = errorMessage }); + await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None); + } + catch (Exception ex) + { + Trace.Error("Fail to report unhandled exception from Runner.Worker process"); + Trace.Error(ex); + } + } + + private class WorkerDispatcher : IDisposable + { + public long RequestId { get; } + public Guid JobId { get; } + public Task WorkerDispatch { get; set; } + public CancellationTokenSource WorkerCancellationTokenSource { get; private set; } + public CancellationTokenSource WorkerCancelTimeoutKillTokenSource { get; private set; } + private readonly object _lock = new object(); + + public WorkerDispatcher(Guid jobId, long requestId) + { + JobId = jobId; + RequestId = requestId; + WorkerCancelTimeoutKillTokenSource = new CancellationTokenSource(); + WorkerCancellationTokenSource = new CancellationTokenSource(); + } + + public bool Cancel(TimeSpan timeout) + { + if (WorkerCancellationTokenSource != null && WorkerCancelTimeoutKillTokenSource != null) + { + lock (_lock) + { + if (WorkerCancellationTokenSource != null && WorkerCancelTimeoutKillTokenSource != null) + { + WorkerCancellationTokenSource.Cancel(); + + // make sure we have at least 60 seconds for cancellation. 
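+ // the hard-kill token is armed 15 seconds short of that timeout, so the worker is forcibly stopped before the cancellation window closes.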
+ if (timeout.TotalSeconds < 60) + { + timeout = TimeSpan.FromSeconds(60); + } + + WorkerCancelTimeoutKillTokenSource.CancelAfter(timeout.Subtract(TimeSpan.FromSeconds(15))); + return true; + } + } + } + + return false; + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + if (WorkerCancellationTokenSource != null || WorkerCancelTimeoutKillTokenSource != null) + { + lock (_lock) + { + if (WorkerCancellationTokenSource != null) + { + WorkerCancellationTokenSource.Dispose(); + WorkerCancellationTokenSource = null; + } + + if (WorkerCancelTimeoutKillTokenSource != null) + { + WorkerCancelTimeoutKillTokenSource.Dispose(); + WorkerCancelTimeoutKillTokenSource = null; + } + } + } + } + } + } + } +} diff --git a/src/Runner.Listener/MessageListener.cs b/src/Runner.Listener/MessageListener.cs new file mode 100644 index 00000000000..2c6a1d6ac82 --- /dev/null +++ b/src/Runner.Listener/MessageListener.cs @@ -0,0 +1,407 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Capabilities; +using GitHub.Runner.Listener.Configuration; +using GitHub.Runner.Common.Util; +using GitHub.Services.Common; +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using System.Security.Cryptography; +using System.IO; +using System.Text; +using GitHub.Services.WebApi; +using GitHub.Services.OAuth; +using System.Diagnostics; +using System.Runtime.InteropServices; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Listener +{ + [ServiceLocator(Default = typeof(MessageListener))] + public interface IMessageListener : IRunnerService + { + Task CreateSessionAsync(CancellationToken token); + Task DeleteSessionAsync(); + Task GetNextMessageAsync(CancellationToken token); + Task DeleteMessageAsync(TaskAgentMessage message); + } + + public sealed class MessageListener : RunnerService, IMessageListener + { + private long? _lastMessageId; + private RunnerSettings _settings; + private ITerminal _term; + private IRunnerServer _runnerServer; + private TaskAgentSession _session; + private TimeSpan _getNextMessageRetryInterval; + private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30); + private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4); + private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30); + private readonly Dictionary _sessionCreationExceptionTracker = new Dictionary(); + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + + _term = HostContext.GetService(); + _runnerServer = HostContext.GetService(); + } + + public async Task CreateSessionAsync(CancellationToken token) + { + Trace.Entering(); + + // Settings + var configManager = HostContext.GetService(); + _settings = configManager.LoadSettings(); + var serverUrl = _settings.ServerUrl; + Trace.Info(_settings); + + // Capabilities. + Dictionary systemCapabilities = await HostContext.GetService().GetCapabilitiesAsync(_settings, token); + + // Create connection. + Trace.Info("Loading Credentials"); + var credMgr = HostContext.GetService(); + VssCredentials creds = credMgr.LoadCredentials(); + + var agent = new TaskAgentReference + { + Id = _settings.AgentId, + Name = _settings.AgentName, + Version = BuildConstants.RunnerPackage.Version, + OSDescription = RuntimeInformation.OSDescription, + }; + string sessionName = $"{Environment.MachineName ?? 
"RUNNER"}"; + var taskAgentSession = new TaskAgentSession(sessionName, agent, systemCapabilities); + + string errorMessage = string.Empty; + bool encounteringError = false; + + while (true) + { + token.ThrowIfCancellationRequested(); + Trace.Info($"Attempt to create session."); + try + { + Trace.Info("Connecting to the Agent Server..."); + await _runnerServer.ConnectAsync(new Uri(serverUrl), creds); + Trace.Info("VssConnection created"); + + _term.WriteLine(); + _term.WriteSuccessMessage("Connected to GitHub"); + _term.WriteLine(); + + _session = await _runnerServer.CreateAgentSessionAsync( + _settings.PoolId, + taskAgentSession, + token); + + Trace.Info($"Session created."); + if (encounteringError) + { + _term.WriteLine($"{DateTime.UtcNow:u}: Runner reconnected."); + _sessionCreationExceptionTracker.Clear(); + encounteringError = false; + } + + return true; + } + catch (OperationCanceledException) when (token.IsCancellationRequested) + { + Trace.Info("Session creation has been cancelled."); + throw; + } + catch (TaskAgentAccessTokenExpiredException) + { + Trace.Info("Agent OAuth token has been revoked. Session creation failed."); + throw; + } + catch (Exception ex) + { + Trace.Error("Catch exception during create session."); + Trace.Error(ex); + + if (!IsSessionCreationExceptionRetriable(ex)) + { + _term.WriteError($"Failed to create session. {ex.Message}"); + return false; + } + + if (!encounteringError) //print the message only on the first error + { + _term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected."); + encounteringError = true; + } + + Trace.Info("Sleeping for {0} seconds before retrying.", _sessionCreationRetryInterval.TotalSeconds); + await HostContext.Delay(_sessionCreationRetryInterval, token); + } + } + } + + public async Task DeleteSessionAsync() + { + if (_session != null && _session.SessionId != Guid.Empty) + { + using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30))) + { + await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token); + } + } + } + + public async Task GetNextMessageAsync(CancellationToken token) + { + Trace.Entering(); + ArgUtil.NotNull(_session, nameof(_session)); + ArgUtil.NotNull(_settings, nameof(_settings)); + bool encounteringError = false; + int continuousError = 0; + string errorMessage = string.Empty; + Stopwatch heartbeat = new Stopwatch(); + heartbeat.Restart(); + while (true) + { + token.ThrowIfCancellationRequested(); + TaskAgentMessage message = null; + try + { + message = await _runnerServer.GetAgentMessageAsync(_settings.PoolId, + _session.SessionId, + _lastMessageId, + token); + + // Decrypt the message body if the session is using encryption + message = DecryptMessage(message); + + if (message != null) + { + _lastMessageId = message.MessageId; + } + + if (encounteringError) //print the message once only if there was an error + { + _term.WriteLine($"{DateTime.UtcNow:u}: Runner reconnected."); + encounteringError = false; + continuousError = 0; + } + } + catch (OperationCanceledException) when (token.IsCancellationRequested) + { + Trace.Info("Get next message has been cancelled."); + throw; + } + catch (TaskAgentAccessTokenExpiredException) + { + Trace.Info("Agent OAuth token has been revoked. 
Unable to pull message."); + throw; + } + catch (Exception ex) + { + Trace.Error("Catch exception during get next message."); + Trace.Error(ex); + + // don't retry if SkipSessionRecover = true, DT service will delete agent session to stop agent from taking more jobs. + if (ex is TaskAgentSessionExpiredException && !_settings.SkipSessionRecover && await CreateSessionAsync(token)) + { + Trace.Info($"{nameof(TaskAgentSessionExpiredException)} received, recovered by recreate session."); + } + else if (!IsGetNextMessageExceptionRetriable(ex)) + { + throw; + } + else + { + continuousError++; + //retry after a random backoff to avoid service throttling + //in case of there is a service error happened and all agents get kicked off of the long poll and all agent try to reconnect back at the same time. + if (continuousError <= 5) + { + // random backoff [15, 30] + _getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(15), TimeSpan.FromSeconds(30), _getNextMessageRetryInterval); + } + else + { + // more aggressive backoff [30, 60] + _getNextMessageRetryInterval = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(30), TimeSpan.FromSeconds(60), _getNextMessageRetryInterval); + } + + if (!encounteringError) + { + //print error only on the first consecutive error + _term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected."); + encounteringError = true; + } + + // re-create VssConnection before next retry + await _runnerServer.RefreshConnectionAsync(RunnerConnectionType.MessageQueue, TimeSpan.FromSeconds(60)); + + Trace.Info("Sleeping for {0} seconds before retrying.", _getNextMessageRetryInterval.TotalSeconds); + await HostContext.Delay(_getNextMessageRetryInterval, token); + } + } + + if (message == null) + { + if (heartbeat.Elapsed > TimeSpan.FromMinutes(30)) + { + Trace.Info($"No message retrieved from session '{_session.SessionId}' within last 30 minutes."); + heartbeat.Restart(); + } + else + { + Trace.Verbose($"No message retrieved from session '{_session.SessionId}'."); + } + + continue; + } + + Trace.Info($"Message '{message.MessageId}' received from session '{_session.SessionId}'."); + return message; + } + } + + public async Task DeleteMessageAsync(TaskAgentMessage message) + { + Trace.Entering(); + ArgUtil.NotNull(_session, nameof(_session)); + + if (message != null && _session.SessionId != Guid.Empty) + { + using (var cs = new CancellationTokenSource(TimeSpan.FromSeconds(30))) + { + await _runnerServer.DeleteAgentMessageAsync(_settings.PoolId, message.MessageId, _session.SessionId, cs.Token); + } + } + } + + private TaskAgentMessage DecryptMessage(TaskAgentMessage message) + { + if (_session.EncryptionKey == null || + _session.EncryptionKey.Value.Length == 0 || + message == null || + message.IV == null || + message.IV.Length == 0) + { + return message; + } + + using (var aes = Aes.Create()) + using (var decryptor = GetMessageDecryptor(aes, message)) + using (var body = new MemoryStream(Convert.FromBase64String(message.Body))) + using (var cryptoStream = new CryptoStream(body, decryptor, CryptoStreamMode.Read)) + using (var bodyReader = new StreamReader(cryptoStream, Encoding.UTF8)) + { + message.Body = bodyReader.ReadToEnd(); + } + + return message; + } + + private ICryptoTransform GetMessageDecryptor( + Aes aes, + TaskAgentMessage message) + { + if (_session.EncryptionKey.Encrypted) + { + // The agent session encryption key uses the AES symmetric algorithm + var keyManager = HostContext.GetService(); + 
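+ // the session's AES key arrives encrypted with the runner's RSA key, so decrypt it here before creating the AES decryptor.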
using (var rsa = keyManager.GetKey()) + { + return aes.CreateDecryptor(rsa.Decrypt(_session.EncryptionKey.Value, RSAEncryptionPadding.OaepSHA1), message.IV); + } + } + else + { + return aes.CreateDecryptor(_session.EncryptionKey.Value, message.IV); + } + } + + private bool IsGetNextMessageExceptionRetriable(Exception ex) + { + if (ex is TaskAgentNotFoundException || + ex is TaskAgentPoolNotFoundException || + ex is TaskAgentSessionExpiredException || + ex is AccessDeniedException || + ex is VssUnauthorizedException) + { + Trace.Info($"Non-retriable exception: {ex.Message}"); + return false; + } + else + { + Trace.Info($"Retriable exception: {ex.Message}"); + return true; + } + } + + private bool IsSessionCreationExceptionRetriable(Exception ex) + { + if (ex is TaskAgentNotFoundException) + { + Trace.Info("The agent no longer exists on the server. Stopping the runner."); + _term.WriteError("The runner no longer exists on the server. Please reconfigure the runner."); + return false; + } + else if (ex is TaskAgentSessionConflictException) + { + Trace.Info("The session for this runner already exists."); + _term.WriteError("A session for this runner already exists."); + if (_sessionCreationExceptionTracker.ContainsKey(nameof(TaskAgentSessionConflictException))) + { + _sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)]++; + if (_sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] * _sessionCreationRetryInterval.TotalSeconds >= _sessionConflictRetryLimit.TotalSeconds) + { + Trace.Info("The session conflict exception have reached retry limit."); + _term.WriteError($"Stop retry on SessionConflictException after retried for {_sessionConflictRetryLimit.TotalSeconds} seconds."); + return false; + } + } + else + { + _sessionCreationExceptionTracker[nameof(TaskAgentSessionConflictException)] = 1; + } + + Trace.Info("The session conflict exception haven't reached retry limit."); + return true; + } + else if (ex is VssOAuthTokenRequestException && ex.Message.Contains("Current server time is")) + { + Trace.Info("Local clock might skewed."); + _term.WriteError("The local machine's clock may be out of sync with the server time by more than five minutes. 
Please sync your clock with your domain or internet time and try again."); + if (_sessionCreationExceptionTracker.ContainsKey(nameof(VssOAuthTokenRequestException))) + { + _sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)]++; + if (_sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] * _sessionCreationRetryInterval.TotalSeconds >= _clockSkewRetryLimit.TotalSeconds) + { + Trace.Info("The OAuth token request exception have reached retry limit."); + _term.WriteError($"Stopped retrying OAuth token request exception after {_clockSkewRetryLimit.TotalSeconds} seconds."); + return false; + } + } + else + { + _sessionCreationExceptionTracker[nameof(VssOAuthTokenRequestException)] = 1; + } + + Trace.Info("The OAuth token request exception haven't reached retry limit."); + return true; + } + else if (ex is TaskAgentPoolNotFoundException || + ex is AccessDeniedException || + ex is VssUnauthorizedException) + { + Trace.Info($"Non-retriable exception: {ex.Message}"); + return false; + } + else + { + Trace.Info($"Retriable exception: {ex.Message}"); + return true; + } + } + } +} diff --git a/src/Runner.Listener/Program.cs b/src/Runner.Listener/Program.cs new file mode 100644 index 00000000000..61bd9ad670b --- /dev/null +++ b/src/Runner.Listener/Program.cs @@ -0,0 +1,140 @@ +using GitHub.Runner.Common; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System; +using System.Globalization; +using System.IO; +using System.Runtime.InteropServices; +using System.Threading; +using System.Threading.Tasks; + +namespace GitHub.Runner.Listener +{ + public static class Program + { + public static int Main(string[] args) + { + using (HostContext context = new HostContext("Runner")) + { + return MainAsync(context, args).GetAwaiter().GetResult(); + } + } + + // Return code definition: (this will be used by service host to determine whether it will re-launch Runner.Listener) + // 0: Runner exit + // 1: Terminate failure + // 2: Retriable failure + // 3: Exit for self update + public async static Task MainAsync(IHostContext context, string[] args) + { + Tracing trace = context.GetTrace(nameof(GitHub.Runner.Listener)); + trace.Info($"Runner is built for {Constants.Runner.Platform} ({Constants.Runner.PlatformArchitecture}) - {BuildConstants.RunnerPackage.PackageName}."); + trace.Info($"RuntimeInformation: {RuntimeInformation.OSDescription}."); + context.WritePerfCounter("RunnerProcessStarted"); + var terminal = context.GetService(); + + // Validate the binaries intended for one OS are not running on a different OS. + switch (Constants.Runner.Platform) + { + case Constants.OSPlatform.Linux: + if (!RuntimeInformation.IsOSPlatform(OSPlatform.Linux)) + { + terminal.WriteLine("This runner version is built for Linux. Please install a correct build for your OS."); + return Constants.Runner.ReturnCode.TerminatedError; + } + break; + case Constants.OSPlatform.OSX: + if (!RuntimeInformation.IsOSPlatform(OSPlatform.OSX)) + { + terminal.WriteLine("This runner version is built for OSX. Please install a correct build for your OS."); + return Constants.Runner.ReturnCode.TerminatedError; + } + break; + case Constants.OSPlatform.Windows: + if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) + { + terminal.WriteLine("This runner version is built for Windows. Please install a correct build for your OS."); + return Constants.Runner.ReturnCode.TerminatedError; + } + break; + default: + terminal.WriteLine($"Running the runner on this platform is not supported. 
The current platform is {RuntimeInformation.OSDescription} and it was built for {Constants.Runner.Platform.ToString()}."); + return Constants.Runner.ReturnCode.TerminatedError; + } + + try + { + trace.Info($"Version: {BuildConstants.RunnerPackage.Version}"); + trace.Info($"Commit: {BuildConstants.Source.CommitHash}"); + trace.Info($"Culture: {CultureInfo.CurrentCulture.Name}"); + trace.Info($"UI Culture: {CultureInfo.CurrentUICulture.Name}"); + + // Validate directory permissions. + string runnerDirectory = context.GetDirectory(WellKnownDirectory.Root); + trace.Info($"Validating directory permissions for: '{runnerDirectory}'"); + try + { + IOUtil.ValidateExecutePermission(runnerDirectory); + } + catch (Exception e) + { + terminal.WriteError($"An error occurred: {e.Message}"); + trace.Error(e); + return Constants.Runner.ReturnCode.TerminatedError; + } + + // Add environment variables from .env file + string envFile = Path.Combine(context.GetDirectory(WellKnownDirectory.Root), ".env"); + if (File.Exists(envFile)) + { + var envContents = File.ReadAllLines(envFile); + foreach (var env in envContents) + { + if (!string.IsNullOrEmpty(env) && env.IndexOf('=') > 0) + { + string envKey = env.Substring(0, env.IndexOf('=')); + string envValue = env.Substring(env.IndexOf('=') + 1); + Environment.SetEnvironmentVariable(envKey, envValue); + } + } + } + + // Parse the command line args. + var command = new CommandSettings(context, args); + trace.Info("Arguments parsed"); + + // Up front validation, warn for unrecognized commandline args. + var unknownCommandlines = command.Validate(); + if (unknownCommandlines.Count > 0) + { + terminal.WriteError($"Unrecognized command-line input arguments: '{string.Join(", ", unknownCommandlines)}'. For usage refer to: .\\config.cmd --help or ./config.sh --help"); + } + + // Defer to the Runner class to execute the command. 
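+ // the return value maps to the codes documented above: cancellation via the shutdown token exits with Success, non-retryable errors terminate, anything else is reported as retryable.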
+ IRunner runner = context.GetService(); + try + { + return await runner.ExecuteCommand(command); + } + catch (OperationCanceledException) when (context.RunnerShutdownToken.IsCancellationRequested) + { + trace.Info("Runner execution been cancelled."); + return Constants.Runner.ReturnCode.Success; + } + catch (NonRetryableException e) + { + terminal.WriteError($"An error occurred: {e.Message}"); + trace.Error(e); + return Constants.Runner.ReturnCode.TerminatedError; + } + + } + catch (Exception e) + { + terminal.WriteError($"An error occurred: {e.Message}"); + trace.Error(e); + return Constants.Runner.ReturnCode.RetryableError; + } + } + } +} diff --git a/src/Runner.Listener/Runner.Listener.csproj b/src/Runner.Listener/Runner.Listener.csproj new file mode 100644 index 00000000000..37bf785e256 --- /dev/null +++ b/src/Runner.Listener/Runner.Listener.csproj @@ -0,0 +1,70 @@ + + + + netcoreapp2.2 + Exe + win-x64;win-x86;linux-x64;linux-arm;rhel.6-x64;osx-x64 + true + portable-net45+win8 + NU1701;NU1603 + $(Version) + + + + + + + + + + + + + + + + + + + portable + + + + OS_WINDOWS;X64;TRACE + + + OS_WINDOWS;X86;TRACE + + + OS_WINDOWS;X64;DEBUG;TRACE + + + OS_WINDOWS;X86;DEBUG;TRACE + + + + OS_OSX;X64;TRACE + + + OS_OSX;DEBUG;X64;TRACE + + + + OS_LINUX;X64;TRACE + + + OS_LINUX;OS_RHEL6;X64;TRACE + + + OS_LINUX;ARM;TRACE + + + OS_LINUX;X64;DEBUG;TRACE + + + OS_LINUX;OS_RHEL6;X64;DEBUG;TRACE + + + OS_LINUX;ARM;DEBUG;TRACE + + diff --git a/src/Runner.Listener/SelfUpdater.cs b/src/Runner.Listener/SelfUpdater.cs new file mode 100644 index 00000000000..6290a702337 --- /dev/null +++ b/src/Runner.Listener/SelfUpdater.cs @@ -0,0 +1,461 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using System; +using System.Diagnostics; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.WebApi; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Listener +{ + [ServiceLocator(Default = typeof(SelfUpdater))] + public interface ISelfUpdater : IRunnerService + { + Task SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token); + } + + public class SelfUpdater : RunnerService, ISelfUpdater + { + private static string _packageType = "agent"; + private static string _platform = BuildConstants.RunnerPackage.PackageName; + + private PackageMetadata _targetPackage; + private ITerminal _terminal; + private IRunnerServer _runnerServer; + private int _poolId; + private int _agentId; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + + _terminal = hostContext.GetService(); + _runnerServer = HostContext.GetService(); + var configStore = HostContext.GetService(); + var settings = configStore.GetSettings(); + _poolId = settings.PoolId; + _agentId = settings.AgentId; + } + + public async Task SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token) + { + if (!await UpdateNeeded(updateMessage.TargetVersion, token)) + { + Trace.Info($"Can't find available update package."); + return false; + } + + Trace.Info($"An update is available."); + + // Print console line that warn user not shutdown runner. 
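+ // update flow: report progress, download and unpack the target package, wait for the running job to finish, clean up old backups, then generate and launch the update script and let the runner exit.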
+ await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner."); + await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner"); + + await DownloadLatestRunner(token); + Trace.Info($"Download latest runner and unzip into runner root."); + + // wait till all running job finish + await UpdateRunnerUpdateStateAsync("Waiting for current job finish running."); + + await jobDispatcher.WaitAsync(token); + Trace.Info($"All running job has exited."); + + // delete runner backup + DeletePreviousVersionRunnerBackup(token); + Trace.Info($"Delete old version runner backup."); + + // generate update script from template + await UpdateRunnerUpdateStateAsync("Generate and execute update script."); + + string updateScript = GenerateUpdateScript(restartInteractiveRunner); + Trace.Info($"Generate update script into: {updateScript}"); + + // kick off update script + Process invokeScript = new Process(); +#if OS_WINDOWS + invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace); + invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\""; +#elif (OS_OSX || OS_LINUX) + invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace); + invokeScript.StartInfo.Arguments = $"\"{updateScript}\""; +#endif + invokeScript.Start(); + Trace.Info($"Update script start running"); + + await UpdateRunnerUpdateStateAsync("Runner will exit shortly for update, should back online within 10 seconds."); + + return true; + } + + private async Task UpdateNeeded(string targetVersion, CancellationToken token) + { + // when talk to old version server, always prefer latest package. + // old server won't send target version as part of update message. + if (string.IsNullOrEmpty(targetVersion)) + { + var packages = await _runnerServer.GetPackagesAsync(_packageType, _platform, 1, token); + if (packages == null || packages.Count == 0) + { + Trace.Info($"There is no package for {_packageType} and {_platform}."); + return false; + } + + _targetPackage = packages.FirstOrDefault(); + } + else + { + _targetPackage = await _runnerServer.GetPackageAsync(_packageType, _platform, targetVersion, token); + if (_targetPackage == null) + { + Trace.Info($"There is no package for {_packageType} and {_platform} with version {targetVersion}."); + return false; + } + } + + Trace.Info($"Version '{_targetPackage.Version}' of '{_targetPackage.Type}' package available in server."); + PackageVersion serverVersion = new PackageVersion(_targetPackage.Version); + Trace.Info($"Current running runner version is {BuildConstants.RunnerPackage.Version}"); + PackageVersion runnerVersion = new PackageVersion(BuildConstants.RunnerPackage.Version); + + return serverVersion.CompareTo(runnerVersion) > 0; + } + + /// + /// _work + /// \_update + /// \bin + /// \externals + /// \run.sh + /// \run.cmd + /// \package.zip //temp download .zip/.tar.gz + /// + /// + /// + private async Task DownloadLatestRunner(CancellationToken token) + { + string latestRunnerDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), Constants.Path.UpdateDirectory); + IOUtil.DeleteDirectory(latestRunnerDirectory, token); + Directory.CreateDirectory(latestRunnerDirectory); + + int runnerSuffix = 1; + string archiveFile = null; + bool downloadSucceeded = false; + + try + { + // Download the runner, using multiple attempts in order to be resilient against any networking/CDN issues + for (int attempt = 1; attempt <= Constants.RunnerDownloadRetryMaxAttempts; attempt++) + { + // Generate an available package name, 
and do our best effort to clean up stale local zip files + while (true) + { + if (_targetPackage.Platform.StartsWith("win")) + { + archiveFile = Path.Combine(latestRunnerDirectory, $"runner{runnerSuffix}.zip"); + } + else + { + archiveFile = Path.Combine(latestRunnerDirectory, $"runner{runnerSuffix}.tar.gz"); + } + + try + { + // delete .zip file + if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile)) + { + Trace.Verbose("Deleting latest runner package zip '{0}'", archiveFile); + IOUtil.DeleteFile(archiveFile); + } + + break; + } + catch (Exception ex) + { + // couldn't delete the file for whatever reason, so generate another name + Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex); + runnerSuffix++; + } + } + + // Allow a 15-minute package download timeout, which is good enough to update the runner from a 1 Mbit/s ADSL connection. + if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_DOWNLOAD_TIMEOUT") ?? string.Empty, out int timeoutSeconds)) + { + timeoutSeconds = 15 * 60; + } + + Trace.Info($"Attempt {attempt}: save latest runner into {archiveFile}."); + + using (var downloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds))) + using (var downloadCts = CancellationTokenSource.CreateLinkedTokenSource(downloadTimeout.Token, token)) + { + try + { + Trace.Info($"Download runner: begin download"); + + //open zip stream in async mode + using (HttpClient httpClient = new HttpClient(HostContext.CreateHttpClientHandler())) + using (FileStream fs = new FileStream(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: 4096, useAsync: true)) + using (Stream result = await httpClient.GetStreamAsync(_targetPackage.DownloadUrl)) + { + //81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k). + await result.CopyToAsync(fs, 81920, downloadCts.Token); + await fs.FlushAsync(downloadCts.Token); + } + + Trace.Info($"Download runner: finished download"); + downloadSucceeded = true; + break; + } + catch (OperationCanceledException) when (token.IsCancellationRequested) + { + Trace.Info($"Runner download has been canceled."); + throw; + } + catch (Exception ex) + { + if (downloadCts.Token.IsCancellationRequested) + { + Trace.Warning($"Runner download has timed out after {timeoutSeconds} seconds"); + } + + Trace.Warning($"Failed to get package '{archiveFile}' from '{_targetPackage.DownloadUrl}'. 
Exception {ex}"); + } + } + } + + if (!downloadSucceeded) + { + throw new TaskCanceledException($"Runner package '{archiveFile}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts"); + } + + // If we got this far, we know that we've successfully downloaded the runner package + if (archiveFile.EndsWith(".zip", StringComparison.OrdinalIgnoreCase)) + { + ZipFile.ExtractToDirectory(archiveFile, latestRunnerDirectory); + } + else if (archiveFile.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase)) + { + string tar = WhichUtil.Which("tar", trace: Trace); + + if (string.IsNullOrEmpty(tar)) + { + throw new NotSupportedException($"tar -xzf"); + } + + // tar -xzf + using (var processInvoker = HostContext.CreateService()) + { + processInvoker.OutputDataReceived += new EventHandler((sender, args) => + { + if (!string.IsNullOrEmpty(args.Data)) + { + Trace.Info(args.Data); + } + }); + + processInvoker.ErrorDataReceived += new EventHandler((sender, args) => + { + if (!string.IsNullOrEmpty(args.Data)) + { + Trace.Error(args.Data); + } + }); + + int exitCode = await processInvoker.ExecuteAsync(latestRunnerDirectory, tar, $"-xzf \"{archiveFile}\"", null, token); + if (exitCode != 0) + { + throw new NotSupportedException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. return code: {exitCode}."); + } + } + } + else + { + throw new NotSupportedException($"{archiveFile}"); + } + + Trace.Info($"Finished getting latest runner package at: {latestRunnerDirectory}."); + } + finally + { + try + { + // delete .zip file + if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile)) + { + Trace.Verbose("Deleting latest runner package zip: {0}", archiveFile); + IOUtil.DeleteFile(archiveFile); + } + } + catch (Exception ex) + { + //it is not critical if we fail to delete the .zip file + Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex); + } + } + + // copy latest runner into runner root folder + // copy bin from _work/_update -> bin.version under root + string binVersionDir = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"{Constants.Path.BinDirectory}.{_targetPackage.Version}"); + Directory.CreateDirectory(binVersionDir); + Trace.Info($"Copy {Path.Combine(latestRunnerDirectory, Constants.Path.BinDirectory)} to {binVersionDir}."); + IOUtil.CopyDirectory(Path.Combine(latestRunnerDirectory, Constants.Path.BinDirectory), binVersionDir, token); + + // copy externals from _work/_update -> externals.version under root + string externalsVersionDir = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"{Constants.Path.ExternalsDirectory}.{_targetPackage.Version}"); + Directory.CreateDirectory(externalsVersionDir); + Trace.Info($"Copy {Path.Combine(latestRunnerDirectory, Constants.Path.ExternalsDirectory)} to {externalsVersionDir}."); + IOUtil.CopyDirectory(Path.Combine(latestRunnerDirectory, Constants.Path.ExternalsDirectory), externalsVersionDir, token); + + // copy and replace all .sh/.cmd files + Trace.Info($"Copy any remaining .sh/.cmd files into runner root."); + foreach (FileInfo file in new DirectoryInfo(latestRunnerDirectory).GetFiles() ?? new FileInfo[0]) + { + // Copy and replace the file. 
+ file.CopyTo(Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), file.Name), true); + } + } + + private void DeletePreviousVersionRunnerBackup(CancellationToken token) + { + // delete previous backup runner (back compat, can be remove after serval sprints) + // bin.bak.2.99.0 + // externals.bak.2.99.0 + foreach (string existBackUp in Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "*.bak.*")) + { + Trace.Info($"Delete existing runner backup at {existBackUp}."); + try + { + IOUtil.DeleteDirectory(existBackUp, token); + } + catch (Exception ex) when (!(ex is OperationCanceledException)) + { + Trace.Error(ex); + Trace.Info($"Catch exception during delete backup folder {existBackUp}, ignore this error try delete the backup folder on next auto-update."); + } + } + + // delete old bin.2.99.0 folder, only leave the current version and the latest download version + var allBinDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "bin.*"); + if (allBinDirs.Length > 2) + { + // there are more than 2 bin.version folder. + // delete older bin.version folders. + foreach (var oldBinDir in allBinDirs) + { + if (string.Equals(oldBinDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"bin"), StringComparison.OrdinalIgnoreCase) || + string.Equals(oldBinDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"bin.{BuildConstants.RunnerPackage.Version}"), StringComparison.OrdinalIgnoreCase) || + string.Equals(oldBinDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"bin.{_targetPackage.Version}"), StringComparison.OrdinalIgnoreCase)) + { + // skip for current runner version + continue; + } + + Trace.Info($"Delete runner bin folder's backup at {oldBinDir}."); + try + { + IOUtil.DeleteDirectory(oldBinDir, token); + } + catch (Exception ex) when (!(ex is OperationCanceledException)) + { + Trace.Error(ex); + Trace.Info($"Catch exception during delete backup folder {oldBinDir}, ignore this error try delete the backup folder on next auto-update."); + } + } + } + + // delete old externals.2.99.0 folder, only leave the current version and the latest download version + var allExternalsDirs = Directory.GetDirectories(HostContext.GetDirectory(WellKnownDirectory.Root), "externals.*"); + if (allExternalsDirs.Length > 2) + { + // there are more than 2 externals.version folder. + // delete older externals.version folders. 
+ foreach (var oldExternalDir in allExternalsDirs) + { + if (string.Equals(oldExternalDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"externals"), StringComparison.OrdinalIgnoreCase) || + string.Equals(oldExternalDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"externals.{BuildConstants.RunnerPackage.Version}"), StringComparison.OrdinalIgnoreCase) || + string.Equals(oldExternalDir, Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"externals.{_targetPackage.Version}"), StringComparison.OrdinalIgnoreCase)) + { + // skip for current runner version + continue; + } + + Trace.Info($"Delete runner externals folder's backup at {oldExternalDir}."); + try + { + IOUtil.DeleteDirectory(oldExternalDir, token); + } + catch (Exception ex) when (!(ex is OperationCanceledException)) + { + Trace.Error(ex); + Trace.Info($"Catch exception during delete backup folder {oldExternalDir}, ignore this error try delete the backup folder on next auto-update."); + } + } + } + } + + private string GenerateUpdateScript(bool restartInteractiveRunner) + { + int processId = Process.GetCurrentProcess().Id; + string updateLog = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), $"SelfUpdate-{DateTime.UtcNow.ToString("yyyyMMdd-HHmmss")}.log"); + string runnerRoot = HostContext.GetDirectory(WellKnownDirectory.Root); + +#if OS_WINDOWS + string templateName = "update.cmd.template"; +#else + string templateName = "update.sh.template"; +#endif + + string templatePath = Path.Combine(runnerRoot, $"bin.{_targetPackage.Version}", templateName); + string template = File.ReadAllText(templatePath); + + template = template.Replace("_PROCESS_ID_", processId.ToString()); + template = template.Replace("_RUNNER_PROCESS_NAME_", $"Runner.Listener{IOUtil.ExeExtension}"); + template = template.Replace("_ROOT_FOLDER_", runnerRoot); + template = template.Replace("_EXIST_RUNNER_VERSION_", BuildConstants.RunnerPackage.Version); + template = template.Replace("_DOWNLOAD_RUNNER_VERSION_", _targetPackage.Version); + template = template.Replace("_UPDATE_LOG_", updateLog); + template = template.Replace("_RESTART_INTERACTIVE_RUNNER_", restartInteractiveRunner ? "1" : "0"); + +#if OS_WINDOWS + string scriptName = "_update.cmd"; +#else + string scriptName = "_update.sh"; +#endif + + string updateScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), scriptName); + if (File.Exists(updateScript)) + { + IOUtil.DeleteFile(updateScript); + } + + File.WriteAllText(updateScript, template); + return updateScript; + } + + private async Task UpdateRunnerUpdateStateAsync(string currentState) + { + _terminal.WriteLine(currentState); + + try + { + await _runnerServer.UpdateAgentUpdateStateAsync(_poolId, _agentId, currentState); + } + catch (VssResourceNotFoundException) + { + // ignore VssResourceNotFoundException, this exception means the runner is configured against a old server that doesn't support report runner update detail. 
+ Trace.Info($"Catch VssResourceNotFoundException during report update state, ignore this error for backcompat."); + } + catch (Exception ex) + { + Trace.Error(ex); + Trace.Info($"Catch exception during report update state, ignore this error and continue auto-update."); + } + } + } +} diff --git a/src/Runner.PluginHost/Program.cs b/src/Runner.PluginHost/Program.cs new file mode 100644 index 00000000000..d823c6fa1af --- /dev/null +++ b/src/Runner.PluginHost/Program.cs @@ -0,0 +1,109 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Runtime.Loader; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Runner.Sdk; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.Runner.PluginHost +{ + public static class Program + { + private static CancellationTokenSource tokenSource = new CancellationTokenSource(); + private static string executingAssemblyLocation = string.Empty; + + public static int Main(string[] args) + { + Console.CancelKeyPress += Console_CancelKeyPress; + + // Set encoding to UTF8, process invoker will use UTF8 write to STDIN + Console.InputEncoding = Encoding.UTF8; + Console.OutputEncoding = Encoding.UTF8; + try + { + ArgUtil.NotNull(args, nameof(args)); + ArgUtil.Equal(2, args.Length, nameof(args.Length)); + + string pluginType = args[0]; + if (string.Equals("action", pluginType, StringComparison.OrdinalIgnoreCase)) + { + string assemblyQualifiedName = args[1]; + ArgUtil.NotNullOrEmpty(assemblyQualifiedName, nameof(assemblyQualifiedName)); + + string serializedContext = Console.ReadLine(); + ArgUtil.NotNullOrEmpty(serializedContext, nameof(serializedContext)); + + RunnerActionPluginExecutionContext executionContext = StringUtil.ConvertFromJson(serializedContext); + ArgUtil.NotNull(executionContext, nameof(executionContext)); + + VariableValue culture; + ArgUtil.NotNull(executionContext.Variables, nameof(executionContext.Variables)); + if (executionContext.Variables.TryGetValue("system.culture", out culture) && + !string.IsNullOrEmpty(culture?.Value)) + { + CultureInfo.DefaultThreadCurrentCulture = new CultureInfo(culture.Value); + CultureInfo.DefaultThreadCurrentUICulture = new CultureInfo(culture.Value); + } + + AssemblyLoadContext.Default.Resolving += ResolveAssembly; + try + { + Type type = Type.GetType(assemblyQualifiedName, throwOnError: true); + var taskPlugin = Activator.CreateInstance(type) as IRunnerActionPlugin; + ArgUtil.NotNull(taskPlugin, nameof(taskPlugin)); + taskPlugin.RunAsync(executionContext, tokenSource.Token).GetAwaiter().GetResult(); + } + catch (Exception ex) + { + // any exception throw from plugin will fail the task. + executionContext.Error(ex.Message); + executionContext.Debug(ex.StackTrace); + return 1; + } + finally + { + AssemblyLoadContext.Default.Resolving -= ResolveAssembly; + } + + return 0; + } + else + { + throw new ArgumentOutOfRangeException(pluginType); + } + } + catch (Exception ex) + { + // infrastructure failure. 
+ Console.Error.WriteLine(ex.ToString()); + return 1; + } + finally + { + Console.CancelKeyPress -= Console_CancelKeyPress; + } + } + + private static Assembly ResolveAssembly(AssemblyLoadContext context, AssemblyName assembly) + { + string assemblyFilename = assembly.Name + ".dll"; + if (string.IsNullOrEmpty(executingAssemblyLocation)) + { + executingAssemblyLocation = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location); + } + return context.LoadFromAssemblyPath(Path.Combine(executingAssemblyLocation, assemblyFilename)); + } + + private static void Console_CancelKeyPress(object sender, ConsoleCancelEventArgs e) + { + e.Cancel = true; + tokenSource.Cancel(); + } + } +} diff --git a/src/Runner.PluginHost/Runner.PluginHost.csproj b/src/Runner.PluginHost/Runner.PluginHost.csproj new file mode 100644 index 00000000000..6c4fede740e --- /dev/null +++ b/src/Runner.PluginHost/Runner.PluginHost.csproj @@ -0,0 +1,63 @@ + + + + netcoreapp2.2 + Exe + win-x64;win-x86;linux-x64;linux-arm;rhel.6-x64;osx-x64 + true + portable-net45+win8 + NU1701;NU1603 + $(Version) + + + + + + + + + + + + portable + + + + OS_WINDOWS;X64;TRACE + + + OS_WINDOWS;X86;TRACE + + + OS_WINDOWS;X64;DEBUG;TRACE + + + OS_WINDOWS;X86;DEBUG;TRACE + + + + OS_OSX;X64;TRACE + + + OS_OSX;DEBUG;X64;TRACE + + + + OS_LINUX;X64;TRACE + + + OS_LINUX;OS_RHEL6;X64;TRACE + + + OS_LINUX;ARM;TRACE + + + OS_LINUX;X64;DEBUG;TRACE + + + OS_LINUX;OS_RHEL6;X64;DEBUG;TRACE + + + OS_LINUX;ARM;DEBUG;TRACE + + diff --git a/src/Runner.Plugins/Artifact/BuildServer.cs b/src/Runner.Plugins/Artifact/BuildServer.cs new file mode 100644 index 00000000000..1d6d8b4aa50 --- /dev/null +++ b/src/Runner.Plugins/Artifact/BuildServer.cs @@ -0,0 +1,58 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Runner.Sdk; +using GitHub.Services.WebApi; +using GitHub.Build.WebApi; + +namespace GitHub.Runner.Plugins.Artifact +{ + // A client wrapper interacting with Build's Artifact API + public class BuildServer + { + private readonly BuildHttpClient _buildHttpClient; + + public BuildServer(VssConnection connection) + { + ArgUtil.NotNull(connection, nameof(connection)); + _buildHttpClient = connection.GetClient(); + } + + // Associate the specified artifact with a build, along with custom data. 
+ public async Task AssociateArtifact( + Guid projectId, + int pipelineId, + string jobId, + string name, + string type, + string data, + Dictionary propertiesDictionary, + CancellationToken cancellationToken = default(CancellationToken)) + { + BuildArtifact artifact = new BuildArtifact() + { + Name = name, + Source = jobId, + Resource = new ArtifactResource() + { + Data = data, + Type = type, + Properties = propertiesDictionary + } + }; + + return await _buildHttpClient.CreateArtifactAsync(artifact, projectId, pipelineId, cancellationToken: cancellationToken); + } + + // Get named artifact from a build + public async Task GetArtifact( + Guid projectId, + int pipelineId, + string name, + CancellationToken cancellationToken) + { + return await _buildHttpClient.GetArtifactAsync(projectId, pipelineId, name, cancellationToken: cancellationToken); + } + } +} diff --git a/src/Runner.Plugins/Artifact/DownloadArtifact.cs b/src/Runner.Plugins/Artifact/DownloadArtifact.cs new file mode 100644 index 00000000000..4ea75a355cc --- /dev/null +++ b/src/Runner.Plugins/Artifact/DownloadArtifact.cs @@ -0,0 +1,79 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Build.WebApi; +using GitHub.Services.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Plugins.Artifact +{ + public class DownloadArtifact : IRunnerActionPlugin + { + + private static class DownloadArtifactInputNames + { + public static readonly string Name = "name"; + public static readonly string ArtifactName = "artifact"; + public static readonly string Path = "path"; + } + + public async Task RunAsync( + RunnerActionPluginExecutionContext context, + CancellationToken token) + { + ArgUtil.NotNull(context, nameof(context)); + string artifactName = context.GetInput(DownloadArtifactInputNames.ArtifactName, required: false); // Back compat since we rename input `artifact` to `name` + if (string.IsNullOrEmpty(artifactName)) + { + artifactName = context.GetInput(DownloadArtifactInputNames.Name, required: true); + } + + string targetPath = context.GetInput(DownloadArtifactInputNames.Path, required: false); + string defaultWorkingDirectory = context.GetGitHubContext("workspace"); + + if (string.IsNullOrEmpty(targetPath)) + { + targetPath = artifactName; + } + + targetPath = Path.IsPathFullyQualified(targetPath) ? targetPath : Path.GetFullPath(Path.Combine(defaultWorkingDirectory, targetPath)); + + // Project ID + Guid projectId = new Guid(context.Variables.GetValueOrDefault(BuildVariables.TeamProjectId)?.Value ?? Guid.Empty.ToString()); + + // Build ID + string buildIdStr = context.Variables.GetValueOrDefault(BuildVariables.BuildId)?.Value ?? 
string.Empty; + if (!int.TryParse(buildIdStr, out int buildId)) + { + throw new ArgumentException($"Run Id is not an Int32: {buildIdStr}"); + } + + context.Output($"Download artifact '{artifactName}' to: '{targetPath}'"); + + BuildServer buildHelper = new BuildServer(context.VssConnection); + BuildArtifact buildArtifact = await buildHelper.GetArtifact(projectId, buildId, artifactName, token); + + if (string.Equals(buildArtifact.Resource.Type, "Container", StringComparison.OrdinalIgnoreCase)) + { + string containerUrl = buildArtifact.Resource.Data; + string[] parts = containerUrl.Split(new[] { '/' }, 3); + if (parts.Length < 3 || !long.TryParse(parts[1], out long containerId)) + { + throw new ArgumentOutOfRangeException($"Invalid container url '{containerUrl}' for artifact '{buildArtifact.Name}'"); + } + + string containerPath = parts[2]; + FileContainerServer fileContainerServer = new FileContainerServer(context.VssConnection, projectId, containerId, containerPath); + await fileContainerServer.DownloadFromContainerAsync(context, targetPath, token); + } + else + { + throw new NotSupportedException($"Invalid artifact type: {buildArtifact.Resource.Type}"); + } + + context.Output("Artifact download finished."); + } + } +} diff --git a/src/Runner.Plugins/Artifact/FileContainerServer.cs b/src/Runner.Plugins/Artifact/FileContainerServer.cs new file mode 100644 index 00000000000..037d161d9ee --- /dev/null +++ b/src/Runner.Plugins/Artifact/FileContainerServer.cs @@ -0,0 +1,660 @@ +using GitHub.Services.FileContainer.Client; +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using System.Diagnostics; +using GitHub.Services.WebApi; +using System.Net.Http; +using System.Net; +using GitHub.Runner.Sdk; +using GitHub.Services.FileContainer; +using GitHub.Services.Common; + +namespace GitHub.Runner.Plugins.Artifact +{ + public class FileContainerServer + { + private const int _defaultFileStreamBufferSize = 4096; + + //81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k). 
+ private const int _defaultCopyBufferSize = 81920; + + private readonly ConcurrentQueue _fileUploadQueue = new ConcurrentQueue(); + private readonly ConcurrentQueue _fileDownloadQueue = new ConcurrentQueue(); + private readonly ConcurrentDictionary> _fileUploadTraceLog = new ConcurrentDictionary>(); + private readonly ConcurrentDictionary> _fileUploadProgressLog = new ConcurrentDictionary>(); + private readonly FileContainerHttpClient _fileContainerHttpClient; + + private CancellationTokenSource _uploadCancellationTokenSource; + private CancellationTokenSource _downloadCancellationTokenSource; + private TaskCompletionSource _uploadFinished; + private TaskCompletionSource _downloadFinished; + private Guid _projectId; + private long _containerId; + private string _containerPath; + private int _uploadFilesProcessed = 0; + private int _downloadFilesProcessed = 0; + private string _sourceParentDirectory; + + public FileContainerServer( + VssConnection connection, + Guid projectId, + long containerId, + string containerPath) + { + _projectId = projectId; + _containerId = containerId; + _containerPath = containerPath; + + // default file upload/download request timeout to 600 seconds + var fileContainerClientConnectionSetting = connection.Settings.Clone(); + if (fileContainerClientConnectionSetting.SendTimeout < TimeSpan.FromSeconds(600)) + { + fileContainerClientConnectionSetting.SendTimeout = TimeSpan.FromSeconds(600); + } + + var fileContainerClientConnection = new VssConnection(connection.Uri, connection.Credentials, fileContainerClientConnectionSetting); + _fileContainerHttpClient = fileContainerClientConnection.GetClient(); + } + + public async Task DownloadFromContainerAsync( + RunnerActionPluginExecutionContext context, + String destination, + CancellationToken cancellationToken) + { + // Find out all container items need to be processed + List containerItems = new List(); + int retryCount = 0; + while (retryCount < 3) + { + try + { + containerItems = await _fileContainerHttpClient.QueryContainerItemsAsync(_containerId, + _projectId, + _containerPath, + cancellationToken: cancellationToken); + break; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + context.Debug($"Container query has been cancelled."); + throw; + } + catch (Exception ex) when (retryCount < 2) + { + retryCount++; + context.Warning($"Fail to query container items under #/{_containerId}/{_containerPath}, Error: {ex.Message}"); + context.Debug(ex.ToString()); + } + + var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(15)); + context.Warning($"Back off {backOff.TotalSeconds} seconds before retry."); + await Task.Delay(backOff); + } + + if (containerItems.Count == 0) + { + context.Output($"There is nothing under #/{_containerId}/{_containerPath}"); + return; + } + + // container items will include both folders, files and even file with zero size + // Create all required empty folders and emptry files, gather a list of files that we need to download from server. 
+ int foldersCreated = 0;
+ int emptyFilesCreated = 0;
+ List<DownloadInfo> downloadFiles = new List<DownloadInfo>();
+ foreach (var item in containerItems.OrderBy(x => x.Path))
+ {
+ if (!item.Path.StartsWith(_containerPath, StringComparison.OrdinalIgnoreCase))
+ {
+ throw new ArgumentOutOfRangeException($"Item {item.Path} is not under #/{_containerId}/{_containerPath}");
+ }
+
+ var localRelativePath = item.Path.Substring(_containerPath.Length).TrimStart('/');
+ var localPath = Path.Combine(destination, localRelativePath);
+
+ if (item.ItemType == ContainerItemType.Folder)
+ {
+ context.Debug($"Ensure folder exists: {localPath}");
+ Directory.CreateDirectory(localPath);
+ foldersCreated++;
+ }
+ else if (item.ItemType == ContainerItemType.File)
+ {
+ if (item.FileLength == 0)
+ {
+ context.Debug($"Create empty file at: {localPath}");
+ var parentDirectory = Path.GetDirectoryName(localPath);
+ Directory.CreateDirectory(parentDirectory);
+ IOUtil.DeleteFile(localPath);
+ using (new FileStream(localPath, FileMode.Create))
+ {
+ }
+ emptyFilesCreated++;
+ }
+ else
+ {
+ context.Debug($"Prepare download {item.Path} to {localPath}");
+ downloadFiles.Add(new DownloadInfo(item.Path, localPath));
+ }
+ }
+ else
+ {
+ throw new NotSupportedException(item.ItemType.ToString());
+ }
+ }
+
+ if (foldersCreated > 0)
+ {
+ context.Output($"{foldersCreated} folders created.");
+ }
+
+ if (emptyFilesCreated > 0)
+ {
+ context.Output($"{emptyFilesCreated} empty files created.");
+ }
+
+ if (downloadFiles.Count == 0)
+ {
+ context.Output($"There is nothing to download");
+ return;
+ }
+
+ // Start multiple tasks to download all files.
+ using (_downloadCancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken))
+ {
+ // Try to download all files for the first time.
+ DownloadResult downloadResult = await ParallelDownloadAsync(context, downloadFiles.AsReadOnly(), Math.Min(downloadFiles.Count, Environment.ProcessorCount), _downloadCancellationTokenSource.Token);
+ if (downloadResult.FailedFiles.Count == 0)
+ {
+ // All files downloaded successfully.
+ context.Output($"{downloadFiles.Count} files downloaded successfully.");
+ return;
+ }
+ else
+ {
+ context.Output($"{downloadResult.FailedFiles.Count} files failed to download, retry these files after a minute.");
+ }
+
+ // Delay 1 minute, then retry the failed files.
+ for (int timer = 60; timer > 0; timer -= 5)
+ {
+ context.Output($"Retry file download after {timer} seconds.");
+ await Task.Delay(TimeSpan.FromSeconds(5), _downloadCancellationTokenSource.Token);
+ }
+
+ // Retry downloading all failed files.
+ context.Output($"Start retrying {downloadResult.FailedFiles.Count} failed file downloads.");
+ DownloadResult retryDownloadResult = await ParallelDownloadAsync(context, downloadResult.FailedFiles.AsReadOnly(), Math.Min(downloadResult.FailedFiles.Count, Environment.ProcessorCount), _downloadCancellationTokenSource.Token);
+ if (retryDownloadResult.FailedFiles.Count == 0)
+ {
+ // All files downloaded successfully after retry.
+ context.Output($"{downloadResult.FailedFiles.Count} files downloaded successfully after retry.");
+ return;
+ }
+ else
+ {
+ throw new Exception($"{retryDownloadResult.FailedFiles.Count} files failed to download even after retry.");
+ }
+ }
+ }
+
+ public async Task<long> CopyToContainerAsync(
+ RunnerActionPluginExecutionContext context,
+ String source,
+ CancellationToken cancellationToken)
+ {
+ // Cap maxConcurrentUploads at 2 until we figure out how to use WinHttpHandler.MaxConnectionsPerServer to modify DefaultConnectionLimit.
+ int maxConcurrentUploads = Math.Min(Environment.ProcessorCount, 2);
+ //context.Output($"Max Concurrent Uploads {maxConcurrentUploads}");
+
+ List<string> files;
+ if (File.Exists(source))
+ {
+ files = new List<string>() { source };
+ _sourceParentDirectory = Path.GetDirectoryName(source);
+ }
+ else
+ {
+ files = Directory.EnumerateFiles(source, "*", SearchOption.AllDirectories).ToList();
+ _sourceParentDirectory = source.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
+ }
+
+ context.Output($"Uploading {files.Count()} files");
+ using (_uploadCancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken))
+ {
+ // Hook up reporting events from the file container client.
+ _fileContainerHttpClient.UploadFileReportTrace += UploadFileTraceReportReceived;
+ _fileContainerHttpClient.UploadFileReportProgress += UploadFileProgressReportReceived;
+
+ try
+ {
+ // Try to upload all files for the first time.
+ UploadResult uploadResult = await ParallelUploadAsync(context, files, maxConcurrentUploads, _uploadCancellationTokenSource.Token);
+
+ if (uploadResult.FailedFiles.Count == 0)
+ {
+ // All files uploaded successfully.
+ context.Output("File upload succeeded.");
+ return uploadResult.TotalFileSizeUploaded;
+ }
+ else
+ {
+ context.Output($"{uploadResult.FailedFiles.Count} files failed to upload, retry these files after a minute.");
+ }
+
+ // Delay 1 minute, then retry the failed files.
+ for (int timer = 60; timer > 0; timer -= 5)
+ {
+ context.Output($"Retry file upload after {timer} seconds.");
+ await Task.Delay(TimeSpan.FromSeconds(5), _uploadCancellationTokenSource.Token);
+ }
+
+ // Retry uploading all failed files.
+ context.Output($"Start retrying {uploadResult.FailedFiles.Count} failed file uploads.");
+ UploadResult retryUploadResult = await ParallelUploadAsync(context, uploadResult.FailedFiles, maxConcurrentUploads, _uploadCancellationTokenSource.Token);
+
+ if (retryUploadResult.FailedFiles.Count == 0)
+ {
+ // All files uploaded successfully after retry.
+ context.Output("File upload succeeded after retry.");
+ return uploadResult.TotalFileSizeUploaded + retryUploadResult.TotalFileSizeUploaded;
+ }
+ else
+ {
+ throw new Exception("File upload failed even after retry.");
+ }
+ }
+ finally
+ {
+ _fileContainerHttpClient.UploadFileReportTrace -= UploadFileTraceReportReceived;
+ _fileContainerHttpClient.UploadFileReportProgress -= UploadFileProgressReportReceived;
+ }
+ }
+ }
+
+ private async Task<DownloadResult> ParallelDownloadAsync(RunnerActionPluginExecutionContext context, IReadOnlyList<DownloadInfo> files, int concurrentDownloads, CancellationToken token)
+ {
+ // Returns the files that failed to download.
+ var downloadResult = new DownloadResult();
+
+ // Nothing needs to be downloaded.
+ if (files.Count == 0)
+ {
+ return downloadResult;
+ }
+
+ // Ensure the file download queue is empty.
+ if (!_fileDownloadQueue.IsEmpty)
+ {
+ throw new ArgumentOutOfRangeException(nameof(_fileDownloadQueue));
+ }
+
+ // Enqueue files into the download queue.
+ foreach (var file in files) + { + _fileDownloadQueue.Enqueue(file); + } + + // Start download monitor task. + _downloadFilesProcessed = 0; + _downloadFinished = new TaskCompletionSource(); + Task downloadMonitor = DownloadReportingAsync(context, files.Count(), token); + + // Start parallel download tasks. + List> parallelDownloadingTasks = new List>(); + for (int downloader = 0; downloader < concurrentDownloads; downloader++) + { + parallelDownloadingTasks.Add(DownloadAsync(context, downloader, token)); + } + + // Wait for parallel download finish. + await Task.WhenAll(parallelDownloadingTasks); + foreach (var downloadTask in parallelDownloadingTasks) + { + // record all failed files. + downloadResult.AddDownloadResult(await downloadTask); + } + + // Stop monitor task; + _downloadFinished.TrySetResult(0); + await downloadMonitor; + + return downloadResult; + } + + private async Task ParallelUploadAsync(RunnerActionPluginExecutionContext context, IReadOnlyList files, int concurrentUploads, CancellationToken token) + { + // return files that fail to upload and total artifact size + var uploadResult = new UploadResult(); + + // nothing needs to upload + if (files.Count == 0) + { + return uploadResult; + } + + // ensure the file upload queue is empty. + if (!_fileUploadQueue.IsEmpty) + { + throw new ArgumentOutOfRangeException(nameof(_fileUploadQueue)); + } + + // enqueue file into upload queue. + foreach (var file in files) + { + _fileUploadQueue.Enqueue(file); + } + + // Start upload monitor task. + _uploadFilesProcessed = 0; + _uploadFinished = new TaskCompletionSource(); + _fileUploadTraceLog.Clear(); + _fileUploadProgressLog.Clear(); + Task uploadMonitor = UploadReportingAsync(context, files.Count(), _uploadCancellationTokenSource.Token); + + // Start parallel upload tasks. + List> parallelUploadingTasks = new List>(); + for (int uploader = 0; uploader < concurrentUploads; uploader++) + { + parallelUploadingTasks.Add(UploadAsync(context, uploader, _uploadCancellationTokenSource.Token)); + } + + // Wait for parallel upload finish. + await Task.WhenAll(parallelUploadingTasks); + foreach (var uploadTask in parallelUploadingTasks) + { + // record all failed files. 
+ uploadResult.AddUploadResult(await uploadTask); + } + + // Stop monitor task; + _uploadFinished.TrySetResult(0); + await uploadMonitor; + + return uploadResult; + } + + private async Task DownloadAsync(RunnerActionPluginExecutionContext context, int downloaderId, CancellationToken token) + { + List failedFiles = new List(); + Stopwatch downloadTimer = new Stopwatch(); + while (_fileDownloadQueue.TryDequeue(out DownloadInfo fileToDownload)) + { + token.ThrowIfCancellationRequested(); + try + { + int retryCount = 0; + bool downloadFailed = false; + while (true) + { + try + { + context.Debug($"Start downloading file: '{fileToDownload.ItemPath}' (Downloader {downloaderId})"); + downloadTimer.Restart(); + using (FileStream fs = new FileStream(fileToDownload.LocalPath, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true)) + using (var downloadStream = await _fileContainerHttpClient.DownloadFileAsync(_containerId, fileToDownload.ItemPath, token, _projectId)) + { + await downloadStream.CopyToAsync(fs, _defaultCopyBufferSize, token); + await fs.FlushAsync(token); + downloadTimer.Stop(); + context.Debug($"File: '{fileToDownload.LocalPath}' took {downloadTimer.ElapsedMilliseconds} milliseconds to finish download (Downloader {downloaderId})"); + break; + } + } + catch (OperationCanceledException) when (token.IsCancellationRequested) + { + context.Debug($"Download has been cancelled while downloading {fileToDownload.ItemPath}. (Downloader {downloaderId})"); + throw; + } + catch (Exception ex) + { + retryCount++; + context.Warning($"Fail to download '{fileToDownload.ItemPath}', error: {ex.Message} (Downloader {downloaderId})"); + context.Debug(ex.ToString()); + } + + if (retryCount < 3) + { + var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30)); + context.Warning($"Back off {backOff.TotalSeconds} seconds before retry. (Downloader {downloaderId})"); + await Task.Delay(backOff); + } + else + { + // upload still failed after 3 tries. + downloadFailed = true; + break; + } + } + + if (downloadFailed) + { + // tracking file that failed to download. + failedFiles.Add(fileToDownload); + } + + Interlocked.Increment(ref _downloadFilesProcessed); + } + catch (Exception ex) + { + // We should never + context.Error($"Error '{ex.Message}' when downloading file '{fileToDownload}'. 
(Downloader {downloaderId})"); + throw ex; + } + } + + return new DownloadResult(failedFiles); + } + + private async Task UploadAsync(RunnerActionPluginExecutionContext context, int uploaderId, CancellationToken token) + { + List failedFiles = new List(); + long uploadedSize = 0; + string fileToUpload; + Stopwatch uploadTimer = new Stopwatch(); + while (_fileUploadQueue.TryDequeue(out fileToUpload)) + { + token.ThrowIfCancellationRequested(); + try + { + using (FileStream fs = File.Open(fileToUpload, FileMode.Open, FileAccess.Read, FileShare.Read)) + { + string itemPath = (_containerPath.TrimEnd('/') + "/" + fileToUpload.Remove(0, _sourceParentDirectory.Length + 1)).Replace('\\', '/'); + uploadTimer.Restart(); + bool catchExceptionDuringUpload = false; + HttpResponseMessage response = null; + try + { + response = await _fileContainerHttpClient.UploadFileAsync(_containerId, itemPath, fs, _projectId, cancellationToken: token, chunkSize: 4 * 1024 * 1024); + } + catch (OperationCanceledException) when (token.IsCancellationRequested) + { + context.Output($"File upload has been cancelled during upload file: '{fileToUpload}'."); + if (response != null) + { + response.Dispose(); + response = null; + } + + throw; + } + catch (Exception ex) + { + catchExceptionDuringUpload = true; + context.Output($"Fail to upload '{fileToUpload}' due to '{ex.Message}'."); + context.Output(ex.ToString()); + } + + uploadTimer.Stop(); + if (catchExceptionDuringUpload || (response != null && response.StatusCode != HttpStatusCode.Created)) + { + if (response != null) + { + context.Output($"Unable to copy file to server StatusCode={response.StatusCode}: {response.ReasonPhrase}. Source file path: {fileToUpload}. Target server path: {itemPath}"); + } + + // output detail upload trace for the file. + ConcurrentQueue logQueue; + if (_fileUploadTraceLog.TryGetValue(itemPath, out logQueue)) + { + context.Output($"Detail upload trace for file that fail to upload: {itemPath}"); + string message; + while (logQueue.TryDequeue(out message)) + { + context.Output(message); + } + } + + // tracking file that failed to upload. + failedFiles.Add(fileToUpload); + } + else + { + context.Debug($"File: '{fileToUpload}' took {uploadTimer.ElapsedMilliseconds} milliseconds to finish upload"); + uploadedSize += fs.Length; + // debug detail upload trace for the file. 
+ ConcurrentQueue logQueue; + if (_fileUploadTraceLog.TryGetValue(itemPath, out logQueue)) + { + context.Debug($"Detail upload trace for file: {itemPath}"); + string message; + while (logQueue.TryDequeue(out message)) + { + context.Debug(message); + } + } + } + + if (response != null) + { + response.Dispose(); + response = null; + } + } + + Interlocked.Increment(ref _uploadFilesProcessed); + } + catch (Exception ex) + { + context.Output($"File error '{ex.Message}' when uploading file '{fileToUpload}'."); + throw ex; + } + } + + return new UploadResult(failedFiles, uploadedSize); + } + + private async Task UploadReportingAsync(RunnerActionPluginExecutionContext context, int totalFiles, CancellationToken token) + { + int traceInterval = 0; + while (!_uploadFinished.Task.IsCompleted && !token.IsCancellationRequested) + { + bool hasDetailProgress = false; + foreach (var file in _fileUploadProgressLog) + { + string message; + while (file.Value.TryDequeue(out message)) + { + hasDetailProgress = true; + context.Output(message); + } + } + + // trace total file progress every 25 seconds when there is no file level detail progress + if (++traceInterval % 2 == 0 && !hasDetailProgress) + { + context.Output($"Total file: {totalFiles} ---- Processed file: {_uploadFilesProcessed} ({(_uploadFilesProcessed * 100) / totalFiles}%)"); + } + + await Task.WhenAny(_uploadFinished.Task, Task.Delay(5000, token)); + } + } + + private async Task DownloadReportingAsync(RunnerActionPluginExecutionContext context, int totalFiles, CancellationToken token) + { + int traceInterval = 0; + while (!_downloadFinished.Task.IsCompleted && !token.IsCancellationRequested) + { + // trace total file progress every 10 seconds when there is no file level detail progress + if (++traceInterval % 2 == 0) + { + context.Output($"Total file: {totalFiles} ---- Downloaded file: {_downloadFilesProcessed} ({(_downloadFilesProcessed * 100) / totalFiles}%)"); + } + + await Task.WhenAny(_downloadFinished.Task, Task.Delay(5000, token)); + } + } + + private void UploadFileTraceReportReceived(object sender, ReportTraceEventArgs e) + { + ConcurrentQueue logQueue = _fileUploadTraceLog.GetOrAdd(e.File, new ConcurrentQueue()); + logQueue.Enqueue(e.Message); + } + + private void UploadFileProgressReportReceived(object sender, ReportProgressEventArgs e) + { + ConcurrentQueue progressQueue = _fileUploadProgressLog.GetOrAdd(e.File, new ConcurrentQueue()); + progressQueue.Enqueue($"Uploading '{e.File}' ({(e.CurrentChunk * 100) / e.TotalChunks}%)"); + } + } + + public class UploadResult + { + public UploadResult() + { + FailedFiles = new List(); + TotalFileSizeUploaded = 0; + } + + public UploadResult(List failedFiles, long totalFileSizeUploaded) + { + FailedFiles = failedFiles; + TotalFileSizeUploaded = totalFileSizeUploaded; + } + public List FailedFiles { get; set; } + + public long TotalFileSizeUploaded { get; set; } + + public void AddUploadResult(UploadResult resultToAdd) + { + this.FailedFiles.AddRange(resultToAdd.FailedFiles); + this.TotalFileSizeUploaded += resultToAdd.TotalFileSizeUploaded; + } + } + + public class DownloadInfo + { + public DownloadInfo(string itemPath, string localPath) + { + this.ItemPath = itemPath; + this.LocalPath = localPath; + } + + public string ItemPath { get; set; } + public string LocalPath { get; set; } + } + + public class DownloadResult + { + public DownloadResult() + { + FailedFiles = new List(); + } + + public DownloadResult(List failedFiles) + { + FailedFiles = failedFiles; + } + public List FailedFiles { get; set; 
} + + public void AddDownloadResult(DownloadResult resultToAdd) + { + this.FailedFiles.AddRange(resultToAdd.FailedFiles); + } + } +} \ No newline at end of file diff --git a/src/Runner.Plugins/Artifact/PublishArtifact.cs b/src/Runner.Plugins/Artifact/PublishArtifact.cs new file mode 100644 index 00000000000..e6b87c67582 --- /dev/null +++ b/src/Runner.Plugins/Artifact/PublishArtifact.cs @@ -0,0 +1,90 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Build.WebApi; +using GitHub.Services.Common; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Plugins.Artifact +{ + public class PublishArtifact : IRunnerActionPlugin + { + private static class PublishArtifactInputNames + { + public static readonly string ArtifactName = "artifactName"; + public static readonly string Name = "name"; + public static readonly string Path = "path"; + } + + public async Task RunAsync( + RunnerActionPluginExecutionContext context, + CancellationToken token) + { + string artifactName = context.GetInput(PublishArtifactInputNames.ArtifactName, required: false); // Back compat since we rename input `artifactName` to `name` + if (string.IsNullOrEmpty(artifactName)) + { + artifactName = context.GetInput(PublishArtifactInputNames.Name, required: true); + } + + string targetPath = context.GetInput(PublishArtifactInputNames.Path, required: true); + string defaultWorkingDirectory = context.GetGitHubContext("workspace"); + + targetPath = Path.IsPathFullyQualified(targetPath) ? targetPath : Path.GetFullPath(Path.Combine(defaultWorkingDirectory, targetPath)); + + if (String.IsNullOrWhiteSpace(artifactName)) + { + throw new ArgumentException($"Artifact name can not be empty string"); + } + + if (Path.GetInvalidFileNameChars().Any(x => artifactName.Contains(x))) + { + throw new ArgumentException($"Artifact name is not valid: {artifactName}. It cannot contain '\\', '/', \"', ':', '<', '>', '|', '*', and '?'"); + } + + // Project ID + Guid projectId = new Guid(context.Variables.GetValueOrDefault(BuildVariables.TeamProjectId)?.Value ?? Guid.Empty.ToString()); + + // Build ID + string buildIdStr = context.Variables.GetValueOrDefault(BuildVariables.BuildId)?.Value ?? string.Empty; + if (!int.TryParse(buildIdStr, out int buildId)) + { + throw new ArgumentException($"Run Id is not an Int32: {buildIdStr}"); + } + + string fullPath = Path.GetFullPath(targetPath); + bool isFile = File.Exists(fullPath); + bool isDir = Directory.Exists(fullPath); + if (!isFile && !isDir) + { + // if local path is neither file nor folder + throw new FileNotFoundException($"Path does not exist {targetPath}"); + } + + // Container ID + string containerIdStr = context.Variables.GetValueOrDefault(BuildVariables.ContainerId)?.Value ?? 
string.Empty; + if (!long.TryParse(containerIdStr, out long containerId)) + { + throw new ArgumentException($"Container Id is not a Int64: {containerIdStr}"); + } + + context.Output($"Uploading artifact '{artifactName}' from '{fullPath}' for run #{buildId}"); + + FileContainerServer fileContainerHelper = new FileContainerServer(context.VssConnection, projectId, containerId, artifactName); + long size = await fileContainerHelper.CopyToContainerAsync(context, fullPath, token); + var propertiesDictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); + propertiesDictionary.Add("artifactsize", size.ToString()); + + string fileContainerFullPath = StringUtil.Format($"#/{containerId}/{artifactName}"); + context.Output($"Uploaded '{fullPath}' to server"); + + BuildServer buildHelper = new BuildServer(context.VssConnection); + string jobId = context.Variables.GetValueOrDefault(WellKnownDistributedTaskVariables.JobId).Value ?? string.Empty; + var artifact = await buildHelper.AssociateArtifact(projectId, buildId, jobId, artifactName, ArtifactResourceTypes.Container, fileContainerFullPath, propertiesDictionary, token); + context.Output($"Associated artifact {artifactName} ({artifact.Id}) with run #{buildId}"); + } + } +} \ No newline at end of file diff --git a/src/Runner.Plugins/Repository/GitCliManager.cs b/src/Runner.Plugins/Repository/GitCliManager.cs new file mode 100644 index 00000000000..0ed051645a7 --- /dev/null +++ b/src/Runner.Plugins/Repository/GitCliManager.cs @@ -0,0 +1,686 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Text; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using System.IO; +using GitHub.Runner.Sdk; +using GitHub.Services.Common; +using GitHub.DistributedTask.Pipelines.ContextData; + +namespace GitHub.Runner.Plugins.Repository +{ + public class GitCliManager + { +#if OS_WINDOWS + private static readonly Encoding s_encoding = Encoding.UTF8; +#else + private static readonly Encoding s_encoding = null; +#endif + private readonly Dictionary gitEnv = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + { "GIT_TERMINAL_PROMPT", "0" }, + }; + + private string gitPath = null; + private Version gitVersion = null; + private string gitLfsPath = null; + private Version gitLfsVersion = null; + + public GitCliManager(Dictionary envs = null) + { + if (envs != null) + { + foreach (var env in envs) + { + if (!string.IsNullOrEmpty(env.Key)) + { + gitEnv[env.Key] = env.Value ?? 
string.Empty; + } + } + } + } + + public bool EnsureGitVersion(Version requiredVersion, bool throwOnNotMatch) + { + ArgUtil.NotNull(gitPath, nameof(gitPath)); + ArgUtil.NotNull(gitVersion, nameof(gitVersion)); + + if (gitVersion < requiredVersion && throwOnNotMatch) + { + throw new NotSupportedException($"Min required git version is '{requiredVersion}', your git ('{gitPath}') version is '{gitVersion}'"); + } + + return gitVersion >= requiredVersion; + } + + public bool EnsureGitLFSVersion(Version requiredVersion, bool throwOnNotMatch) + { + ArgUtil.NotNull(gitLfsPath, nameof(gitLfsPath)); + ArgUtil.NotNull(gitLfsVersion, nameof(gitLfsVersion)); + + if (gitLfsVersion < requiredVersion && throwOnNotMatch) + { + throw new NotSupportedException($"Min required git-lfs version is '{requiredVersion}', your git-lfs ('{gitLfsPath}') version is '{gitLfsVersion}'"); + } + + return gitLfsVersion >= requiredVersion; + } + + public async Task LoadGitExecutionInfo(RunnerActionPluginExecutionContext context) + { + // Resolve the location of git. + gitPath = WhichUtil.Which("git", require: true, trace: context); + ArgUtil.File(gitPath, nameof(gitPath)); + + // Get the Git version. + gitVersion = await GitVersion(context); + ArgUtil.NotNull(gitVersion, nameof(gitVersion)); + context.Debug($"Detect git version: {gitVersion.ToString()}."); + + // Resolve the location of git-lfs. + // This should be best effort since checkout lfs objects is an option. + // We will check and ensure git-lfs version later + gitLfsPath = WhichUtil.Which("git-lfs", require: false, trace: context); + + // Get the Git-LFS version if git-lfs exist in %PATH%. + if (!string.IsNullOrEmpty(gitLfsPath)) + { + gitLfsVersion = await GitLfsVersion(context); + context.Debug($"Detect git-lfs version: '{gitLfsVersion?.ToString() ?? string.Empty}'."); + } + + // required 2.0, all git operation commandline args need min git version 2.0 + Version minRequiredGitVersion = new Version(2, 0); + EnsureGitVersion(minRequiredGitVersion, throwOnNotMatch: true); + + // suggest user upgrade to 2.9 for better git experience + Version recommendGitVersion = new Version(2, 9); + if (!EnsureGitVersion(recommendGitVersion, throwOnNotMatch: false)) + { + context.Output($"To get a better Git experience, upgrade your Git to at least version '{recommendGitVersion}'. Your current Git version is '{gitVersion}'."); + } + + // Set the user agent. 
+ string gitHttpUserAgentEnv = $"git/{gitVersion.ToString()} (github-actions-runner-git/{BuildConstants.RunnerPackage.Version})"; + context.Debug($"Set git useragent to: {gitHttpUserAgentEnv}."); + gitEnv["GIT_HTTP_USER_AGENT"] = gitHttpUserAgentEnv; + } + + // git init + public async Task GitInit(RunnerActionPluginExecutionContext context, string repositoryPath) + { + context.Debug($"Init git repository at: {repositoryPath}."); + string repoRootEscapeSpace = StringUtil.Format(@"""{0}""", repositoryPath.Replace(@"""", @"\""")); + return await ExecuteGitCommandAsync(context, repositoryPath, "init", StringUtil.Format($"{repoRootEscapeSpace}")); + } + + // git fetch --tags --prune --progress --no-recurse-submodules [--depth=15] origin [+refs/pull/*:refs/remote/pull/*] + public async Task GitFetch(RunnerActionPluginExecutionContext context, string repositoryPath, string remoteName, int fetchDepth, List refSpec, string additionalCommandLine, CancellationToken cancellationToken) + { + context.Debug($"Fetch git repository at: {repositoryPath} remote: {remoteName}."); + if (refSpec != null && refSpec.Count > 0) + { + refSpec = refSpec.Where(r => !string.IsNullOrEmpty(r)).ToList(); + } + + // default options for git fetch. + string options = StringUtil.Format($"--tags --prune --progress --no-recurse-submodules {remoteName} {string.Join(" ", refSpec)}"); + + // If shallow fetch add --depth arg + // If the local repository is shallowed but there is no fetch depth provide for this build, + // add --unshallow to convert the shallow repository to a complete repository + if (fetchDepth > 0) + { + options = StringUtil.Format($"--tags --prune --progress --no-recurse-submodules --depth={fetchDepth} {remoteName} {string.Join(" ", refSpec)}"); + } + else + { + if (File.Exists(Path.Combine(repositoryPath, ".git", "shallow"))) + { + options = StringUtil.Format($"--tags --prune --progress --no-recurse-submodules --unshallow {remoteName} {string.Join(" ", refSpec)}"); + } + } + + int retryCount = 0; + int fetchExitCode = 0; + while (retryCount < 3) + { + fetchExitCode = await ExecuteGitCommandAsync(context, repositoryPath, "fetch", options, additionalCommandLine, cancellationToken); + if (fetchExitCode == 0) + { + break; + } + else + { + if (++retryCount < 3) + { + var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10)); + context.Warning($"Git fetch failed with exit code {fetchExitCode}, back off {backOff.TotalSeconds} seconds before retry."); + await Task.Delay(backOff); + } + } + } + + return fetchExitCode; + } + + // git fetch --no-tags --prune --progress --no-recurse-submodules [--depth=15] origin [+refs/pull/*:refs/remote/pull/*] [+refs/tags/1:refs/tags/1] + public async Task GitFetchNoTags(RunnerActionPluginExecutionContext context, string repositoryPath, string remoteName, int fetchDepth, List refSpec, string additionalCommandLine, CancellationToken cancellationToken) + { + context.Debug($"Fetch git repository at: {repositoryPath} remote: {remoteName}."); + if (refSpec != null && refSpec.Count > 0) + { + refSpec = refSpec.Where(r => !string.IsNullOrEmpty(r)).ToList(); + } + + string options; + + // If shallow fetch add --depth arg + // If the local repository is shallowed but there is no fetch depth provide for this build, + // add --unshallow to convert the shallow repository to a complete repository + if (fetchDepth > 0) + { + options = StringUtil.Format($"--no-tags --prune --progress --no-recurse-submodules --depth={fetchDepth} {remoteName} {string.Join(" ", 
refSpec)}"); + } + else if (File.Exists(Path.Combine(repositoryPath, ".git", "shallow"))) + { + options = StringUtil.Format($"--no-tags --prune --progress --no-recurse-submodules --unshallow {remoteName} {string.Join(" ", refSpec)}"); + } + else + { + // default options for git fetch. + options = StringUtil.Format($"--no-tags --prune --progress --no-recurse-submodules {remoteName} {string.Join(" ", refSpec)}"); + } + + int retryCount = 0; + int fetchExitCode = 0; + while (retryCount < 3) + { + fetchExitCode = await ExecuteGitCommandAsync(context, repositoryPath, "fetch", options, additionalCommandLine, cancellationToken); + if (fetchExitCode == 0) + { + break; + } + else + { + if (++retryCount < 3) + { + var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10)); + context.Warning($"Git fetch failed with exit code {fetchExitCode}, back off {backOff.TotalSeconds} seconds before retry."); + await Task.Delay(backOff); + } + } + } + + return fetchExitCode; + } + + // git lfs fetch origin [ref] + public async Task GitLFSFetch(RunnerActionPluginExecutionContext context, string repositoryPath, string remoteName, string refSpec, string additionalCommandLine, CancellationToken cancellationToken) + { + context.Debug($"Fetch LFS objects for git repository at: {repositoryPath} remote: {remoteName}."); + + // default options for git lfs fetch. + string options = StringUtil.Format($"fetch origin {refSpec}"); + + int retryCount = 0; + int fetchExitCode = 0; + while (retryCount < 3) + { + fetchExitCode = await ExecuteGitCommandAsync(context, repositoryPath, "lfs", options, additionalCommandLine, cancellationToken); + if (fetchExitCode == 0) + { + break; + } + else + { + if (++retryCount < 3) + { + var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10)); + context.Warning($"Git lfs fetch failed with exit code {fetchExitCode}, back off {backOff.TotalSeconds} seconds before retry."); + await Task.Delay(backOff); + } + } + } + + return fetchExitCode; + } + + // git lfs pull + public async Task GitLFSPull(RunnerActionPluginExecutionContext context, string repositoryPath, string additionalCommandLine, CancellationToken cancellationToken) + { + context.Debug($"Download LFS objects for git repository at: {repositoryPath}."); + + int retryCount = 0; + int pullExitCode = 0; + while (retryCount < 3) + { + pullExitCode = await ExecuteGitCommandAsync(context, repositoryPath, "lfs", "pull", additionalCommandLine, cancellationToken); + if (pullExitCode == 0) + { + break; + } + else + { + if (++retryCount < 3) + { + var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10)); + context.Warning($"Git lfs pull failed with exit code {pullExitCode}, back off {backOff.TotalSeconds} seconds before retry."); + await Task.Delay(backOff); + } + } + } + + return pullExitCode; + } + + // git symbolic-ref -q + public async Task GitSymbolicRefHEAD(RunnerActionPluginExecutionContext context, string repositoryPath) + { + context.Debug($"Check whether HEAD is detached HEAD."); + return await ExecuteGitCommandAsync(context, repositoryPath, "symbolic-ref", "-q HEAD"); + } + + // git checkout -f --progress + public async Task GitCheckout(RunnerActionPluginExecutionContext context, string repositoryPath, string committishOrBranchSpec, CancellationToken cancellationToken) + { + context.Debug($"Checkout {committishOrBranchSpec}."); + + // Git 2.7 support report checkout progress to stderr during stdout/err redirect. 
+ string options; + if (gitVersion >= new Version(2, 7)) + { + options = StringUtil.Format("--progress --force {0}", committishOrBranchSpec); + } + else + { + options = StringUtil.Format("--force {0}", committishOrBranchSpec); + } + + return await ExecuteGitCommandAsync(context, repositoryPath, "checkout", options, cancellationToken); + } + + // git checkout -B --progress branch remoteBranch + public async Task GitCheckoutB(RunnerActionPluginExecutionContext context, string repositoryPath, string newBranch, string startPoint, CancellationToken cancellationToken) + { + context.Debug($"Checkout -B {newBranch} {startPoint}."); + + // Git 2.7 support report checkout progress to stderr during stdout/err redirect. + string options; + if (gitVersion >= new Version(2, 7)) + { + options = $"--progress --force -B {newBranch} {startPoint}"; + } + else + { + options = $"--force -B {newBranch} {startPoint}"; + } + + return await ExecuteGitCommandAsync(context, repositoryPath, "checkout", options, cancellationToken); + } + + // git clean -ffdx + public async Task GitClean(RunnerActionPluginExecutionContext context, string repositoryPath) + { + context.Debug($"Delete untracked files/folders for repository at {repositoryPath}."); + + // Git 2.4 support git clean -ffdx. + string options; + if (gitVersion >= new Version(2, 4)) + { + options = "-ffdx"; + } + else + { + options = "-fdx"; + } + + return await ExecuteGitCommandAsync(context, repositoryPath, "clean", options); + } + + // git reset --hard + public async Task GitReset(RunnerActionPluginExecutionContext context, string repositoryPath, string commit = "HEAD") + { + context.Debug($"Undo any changes to tracked files in the working tree for repository at {repositoryPath}."); + return await ExecuteGitCommandAsync(context, repositoryPath, "reset", $"--hard {commit}"); + } + + // get remote set-url + public async Task GitRemoteAdd(RunnerActionPluginExecutionContext context, string repositoryPath, string remoteName, string remoteUrl) + { + context.Debug($"Add git remote: {remoteName} to url: {remoteUrl} for repository under: {repositoryPath}."); + return await ExecuteGitCommandAsync(context, repositoryPath, "remote", StringUtil.Format($"add {remoteName} {remoteUrl}")); + } + + // get remote set-url + public async Task GitRemoteSetUrl(RunnerActionPluginExecutionContext context, string repositoryPath, string remoteName, string remoteUrl) + { + context.Debug($"Set git fetch url to: {remoteUrl} for remote: {remoteName}."); + return await ExecuteGitCommandAsync(context, repositoryPath, "remote", StringUtil.Format($"set-url {remoteName} {remoteUrl}")); + } + + // get remote set-url --push + public async Task GitRemoteSetPushUrl(RunnerActionPluginExecutionContext context, string repositoryPath, string remoteName, string remoteUrl) + { + context.Debug($"Set git push url to: {remoteUrl} for remote: {remoteName}."); + return await ExecuteGitCommandAsync(context, repositoryPath, "remote", StringUtil.Format($"set-url --push {remoteName} {remoteUrl}")); + } + + // git submodule foreach git clean -ffdx + public async Task GitSubmoduleClean(RunnerActionPluginExecutionContext context, string repositoryPath) + { + context.Debug($"Delete untracked files/folders for submodules at {repositoryPath}."); + + // Git 2.4 support git clean -ffdx. 
+ string options; + if (gitVersion >= new Version(2, 4)) + { + options = "-ffdx"; + } + else + { + options = "-fdx"; + } + + return await ExecuteGitCommandAsync(context, repositoryPath, "submodule", $"foreach git clean {options}"); + } + + // git submodule foreach git reset --hard HEAD + public async Task GitSubmoduleReset(RunnerActionPluginExecutionContext context, string repositoryPath) + { + context.Debug($"Undo any changes to tracked files in the working tree for submodules at {repositoryPath}."); + return await ExecuteGitCommandAsync(context, repositoryPath, "submodule", "foreach git reset --hard HEAD"); + } + + // git submodule update --init --force [--depth=15] [--recursive] + public async Task GitSubmoduleUpdate(RunnerActionPluginExecutionContext context, string repositoryPath, int fetchDepth, string additionalCommandLine, bool recursive, CancellationToken cancellationToken) + { + context.Debug("Update the registered git submodules."); + string options = "update --init --force"; + if (fetchDepth > 0) + { + options = options + $" --depth={fetchDepth}"; + } + if (recursive) + { + options = options + " --recursive"; + } + + return await ExecuteGitCommandAsync(context, repositoryPath, "submodule", options, additionalCommandLine, cancellationToken); + } + + // git submodule sync [--recursive] + public async Task GitSubmoduleSync(RunnerActionPluginExecutionContext context, string repositoryPath, bool recursive, CancellationToken cancellationToken) + { + context.Debug("Synchronizes submodules' remote URL configuration setting."); + string options = "sync"; + if (recursive) + { + options = options + " --recursive"; + } + + return await ExecuteGitCommandAsync(context, repositoryPath, "submodule", options, cancellationToken); + } + + // git config --get remote.origin.url + public async Task GitGetFetchUrl(RunnerActionPluginExecutionContext context, string repositoryPath) + { + context.Debug($"Inspect remote.origin.url for repository under {repositoryPath}"); + Uri fetchUrl = null; + + List outputStrings = new List(); + int exitCode = await ExecuteGitCommandAsync(context, repositoryPath, "config", "--get remote.origin.url", outputStrings); + + if (exitCode != 0) + { + context.Warning($"'git config --get remote.origin.url' failed with exit code: {exitCode}, output: '{string.Join(Environment.NewLine, outputStrings)}'"); + } + else + { + // remove empty strings + outputStrings = outputStrings.Where(o => !string.IsNullOrEmpty(o)).ToList(); + if (outputStrings.Count == 1 && !string.IsNullOrEmpty(outputStrings.First())) + { + string remoteFetchUrl = outputStrings.First(); + if (Uri.IsWellFormedUriString(remoteFetchUrl, UriKind.Absolute)) + { + context.Debug($"Get remote origin fetch url from git config: {remoteFetchUrl}"); + fetchUrl = new Uri(remoteFetchUrl); + } + else + { + context.Debug($"The Origin fetch url from git config: {remoteFetchUrl} is not a absolute well formed url."); + } + } + else + { + context.Debug($"Unable capture git remote fetch uri from 'git config --get remote.origin.url' command's output, the command's output is not expected: {string.Join(Environment.NewLine, outputStrings)}."); + } + } + + return fetchUrl; + } + + // git config + public async Task GitConfig(RunnerActionPluginExecutionContext context, string repositoryPath, string configKey, string configValue) + { + context.Debug($"Set git config {configKey} {configValue}"); + return await ExecuteGitCommandAsync(context, repositoryPath, "config", StringUtil.Format($"{configKey} {configValue}")); + } + + // git config 
--get-all + public async Task GitConfigExist(RunnerActionPluginExecutionContext context, string repositoryPath, string configKey) + { + // git config --get-all {configKey} will return 0 and print the value if the config exist. + context.Debug($"Checking git config {configKey} exist or not"); + + // ignore any outputs by redirect them into a string list, since the output might contains secrets. + List outputStrings = new List(); + int exitcode = await ExecuteGitCommandAsync(context, repositoryPath, "config", StringUtil.Format($"--get-all {configKey}"), outputStrings); + + return exitcode == 0; + } + + // git config --unset-all + public async Task GitConfigUnset(RunnerActionPluginExecutionContext context, string repositoryPath, string configKey) + { + context.Debug($"Unset git config --unset-all {configKey}"); + return await ExecuteGitCommandAsync(context, repositoryPath, "config", StringUtil.Format($"--unset-all {configKey}")); + } + + // git config gc.auto 0 + public async Task GitDisableAutoGC(RunnerActionPluginExecutionContext context, string repositoryPath) + { + context.Debug("Disable git auto garbage collection."); + return await ExecuteGitCommandAsync(context, repositoryPath, "config", "gc.auto 0"); + } + + // git repack -adfl + public async Task GitRepack(RunnerActionPluginExecutionContext context, string repositoryPath) + { + context.Debug("Compress .git directory."); + return await ExecuteGitCommandAsync(context, repositoryPath, "repack", "-adfl"); + } + + // git prune + public async Task GitPrune(RunnerActionPluginExecutionContext context, string repositoryPath) + { + context.Debug("Delete unreachable objects under .git directory."); + return await ExecuteGitCommandAsync(context, repositoryPath, "prune", "-v"); + } + + // git count-objects -v -H + public async Task GitCountObjects(RunnerActionPluginExecutionContext context, string repositoryPath) + { + context.Debug("Inspect .git directory."); + return await ExecuteGitCommandAsync(context, repositoryPath, "count-objects", "-v -H"); + } + + // git lfs install --local + public async Task GitLFSInstall(RunnerActionPluginExecutionContext context, string repositoryPath) + { + context.Debug("Ensure git-lfs installed."); + return await ExecuteGitCommandAsync(context, repositoryPath, "lfs", "install --local"); + } + + // git lfs logs last + public async Task GitLFSLogs(RunnerActionPluginExecutionContext context, string repositoryPath) + { + context.Debug("Get git-lfs logs."); + return await ExecuteGitCommandAsync(context, repositoryPath, "lfs", "logs last"); + } + + // git version + public async Task GitVersion(RunnerActionPluginExecutionContext context) + { + context.Debug("Get git version."); + string runnerWorkspace = context.GetRunnerContext("workspace"); + ArgUtil.Directory(runnerWorkspace, "runnerWorkspace"); + Version version = null; + List outputStrings = new List(); + int exitCode = await ExecuteGitCommandAsync(context, runnerWorkspace, "version", null, outputStrings); + context.Output($"{string.Join(Environment.NewLine, outputStrings)}"); + if (exitCode == 0) + { + // remove any empty line. 
+ outputStrings = outputStrings.Where(o => !string.IsNullOrEmpty(o)).ToList(); + if (outputStrings.Count == 1 && !string.IsNullOrEmpty(outputStrings.First())) + { + string verString = outputStrings.First(); + // we interested about major.minor.patch version + Regex verRegex = new Regex("\\d+\\.\\d+(\\.\\d+)?", RegexOptions.IgnoreCase); + var matchResult = verRegex.Match(verString); + if (matchResult.Success && !string.IsNullOrEmpty(matchResult.Value)) + { + if (!Version.TryParse(matchResult.Value, out version)) + { + version = null; + } + } + } + } + + return version; + } + + // git lfs version + public async Task GitLfsVersion(RunnerActionPluginExecutionContext context) + { + context.Debug("Get git-lfs version."); + string runnerWorkspace = context.GetRunnerContext("workspace"); + ArgUtil.Directory(runnerWorkspace, "runnerWorkspace"); + Version version = null; + List outputStrings = new List(); + int exitCode = await ExecuteGitCommandAsync(context, runnerWorkspace, "lfs version", null, outputStrings); + context.Output($"{string.Join(Environment.NewLine, outputStrings)}"); + if (exitCode == 0) + { + // remove any empty line. + outputStrings = outputStrings.Where(o => !string.IsNullOrEmpty(o)).ToList(); + if (outputStrings.Count == 1 && !string.IsNullOrEmpty(outputStrings.First())) + { + string verString = outputStrings.First(); + // we interested about major.minor.patch version + Regex verRegex = new Regex("\\d+\\.\\d+(\\.\\d+)?", RegexOptions.IgnoreCase); + var matchResult = verRegex.Match(verString); + if (matchResult.Success && !string.IsNullOrEmpty(matchResult.Value)) + { + if (!Version.TryParse(matchResult.Value, out version)) + { + version = null; + } + } + } + } + + return version; + } + + private async Task ExecuteGitCommandAsync(RunnerActionPluginExecutionContext context, string repoRoot, string command, string options, CancellationToken cancellationToken = default(CancellationToken)) + { + string arg = StringUtil.Format($"{command} {options}").Trim(); + context.Command($"git {arg}"); + + var processInvoker = new ProcessInvoker(context); + processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + context.Output(message.Data); + }; + + processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + context.Output(message.Data); + }; + + return await processInvoker.ExecuteAsync( + workingDirectory: repoRoot, + fileName: gitPath, + arguments: arg, + environment: gitEnv, + requireExitCodeZero: false, + outputEncoding: s_encoding, + cancellationToken: cancellationToken); + } + + private async Task ExecuteGitCommandAsync(RunnerActionPluginExecutionContext context, string repoRoot, string command, string options, IList output) + { + string arg = StringUtil.Format($"{command} {options}").Trim(); + context.Command($"git {arg}"); + + if (output == null) + { + output = new List(); + } + + var processInvoker = new ProcessInvoker(context); + processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + output.Add(message.Data); + }; + + processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + context.Output(message.Data); + }; + + return await processInvoker.ExecuteAsync( + workingDirectory: repoRoot, + fileName: gitPath, + arguments: arg, + environment: gitEnv, + requireExitCodeZero: false, + outputEncoding: s_encoding, + cancellationToken: default(CancellationToken)); + } + + private async Task 
ExecuteGitCommandAsync(RunnerActionPluginExecutionContext context, string repoRoot, string command, string options, string additionalCommandLine, CancellationToken cancellationToken) + { + string arg = StringUtil.Format($"{additionalCommandLine} {command} {options}").Trim(); + context.Command($"git {arg}"); + + var processInvoker = new ProcessInvoker(context); + processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + context.Output(message.Data); + }; + + processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + context.Output(message.Data); + }; + + return await processInvoker.ExecuteAsync( + workingDirectory: repoRoot, + fileName: gitPath, + arguments: arg, + environment: gitEnv, + requireExitCodeZero: false, + outputEncoding: s_encoding, + cancellationToken: cancellationToken); + } + } +} diff --git a/src/Runner.Plugins/Repository/v1.0/GitSourceProvider.cs b/src/Runner.Plugins/Repository/v1.0/GitSourceProvider.cs new file mode 100644 index 00000000000..ed7ba9f29f7 --- /dev/null +++ b/src/Runner.Plugins/Repository/v1.0/GitSourceProvider.cs @@ -0,0 +1,703 @@ +using Pipelines = GitHub.DistributedTask.Pipelines; +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using System.IO; +using System.Text.RegularExpressions; +using System.Text; +using System.Diagnostics; +using GitHub.Runner.Sdk; +using System.Linq; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.WebApi; + +namespace GitHub.Runner.Plugins.Repository.v1_0 +{ + public sealed class GitHubSourceProvider + { + // refs prefix + private const string _refsPrefix = "refs/heads/"; + private const string _remoteRefsPrefix = "refs/remotes/origin/"; + private const string _pullRefsPrefix = "refs/pull/"; + private const string _remotePullRefsPrefix = "refs/remotes/pull/"; + + // min git version that support add extra auth header. + private Version _minGitVersionSupportAuthHeader = new Version(2, 9); + +#if OS_WINDOWS + // min git version that support override sslBackend setting. + private Version _minGitVersionSupportSSLBackendOverride = new Version(2, 14, 2); +#endif + + // min git-lfs version that support add extra auth header. + private Version _minGitLfsVersionSupportAuthHeader = new Version(2, 1); + + private void RequirementCheck(RunnerActionPluginExecutionContext executionContext, GitCliManager gitCommandManager, bool checkGitLfs) + { + // v2.9 git exist use auth header. + gitCommandManager.EnsureGitVersion(_minGitVersionSupportAuthHeader, throwOnNotMatch: true); + +#if OS_WINDOWS + // check git version for SChannel SSLBackend (Windows Only) + bool schannelSslBackend = StringUtil.ConvertToBoolean(executionContext.GetRunnerContext("gituseschannel")); + if (schannelSslBackend) + { + gitCommandManager.EnsureGitVersion(_minGitVersionSupportSSLBackendOverride, throwOnNotMatch: true); + } +#endif + if (checkGitLfs) + { + // v2.1 git-lfs exist use auth header. + gitCommandManager.EnsureGitLFSVersion(_minGitLfsVersionSupportAuthHeader, throwOnNotMatch: true); + } + } + + private string GenerateBasicAuthHeader(RunnerActionPluginExecutionContext executionContext, string accessToken) + { + // use basic auth header with username:password in base64encoding. + string authHeader = $"x-access-token:{accessToken}"; + string base64encodedAuthHeader = Convert.ToBase64String(Encoding.UTF8.GetBytes(authHeader)); + + // add base64 encoding auth header into secretMasker. 
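+            // Illustrative example with a made-up token: for accessToken = "ghs_example",
+            // authHeader is "x-access-token:ghs_example", which base64-encodes to
+            // "eC1hY2Nlc3MtdG9rZW46Z2hzX2V4YW1wbGU=", so the value returned below is
+            // "basic eC1hY2Nlc3MtdG9rZW46Z2hzX2V4YW1wbGU="; masking the encoded form keeps the token out of logs as well.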
+ executionContext.AddMask(base64encodedAuthHeader); + return $"basic {base64encodedAuthHeader}"; + } + + public async Task GetSourceAsync( + RunnerActionPluginExecutionContext executionContext, + string repositoryPath, + string repoFullName, + string sourceBranch, + string sourceVersion, + bool clean, + string submoduleInput, + int fetchDepth, + bool gitLfsSupport, + string accessToken, + CancellationToken cancellationToken) + { + // Validate args. + ArgUtil.NotNull(executionContext, nameof(executionContext)); + Uri proxyUrlWithCred = null; + string proxyUrlWithCredString = null; + bool useSelfSignedCACert = false; + bool useClientCert = false; + string clientCertPrivateKeyAskPassFile = null; + bool acceptUntrustedCerts = false; + + executionContext.Output($"Syncing repository: {repoFullName}"); + Uri repositoryUrl = new Uri($"https://github.com/{repoFullName}"); + if (!repositoryUrl.IsAbsoluteUri) + { + throw new InvalidOperationException("Repository url need to be an absolute uri."); + } + + string targetPath = repositoryPath; + + // input Submodules can be ['', true, false, recursive] + // '' or false indicate don't checkout submodules + // true indicate checkout top level submodules + // recursive indicate checkout submodules recursively + bool checkoutSubmodules = false; + bool checkoutNestedSubmodules = false; + if (!string.IsNullOrEmpty(submoduleInput)) + { + if (string.Equals(submoduleInput, Pipelines.PipelineConstants.CheckoutTaskInputs.SubmodulesOptions.Recursive, StringComparison.OrdinalIgnoreCase)) + { + checkoutSubmodules = true; + checkoutNestedSubmodules = true; + } + else + { + checkoutSubmodules = StringUtil.ConvertToBoolean(submoduleInput); + } + } + + var runnerCert = executionContext.GetCertConfiguration(); + acceptUntrustedCerts = runnerCert?.SkipServerCertificateValidation ?? false; + + executionContext.Debug($"repository url={repositoryUrl}"); + executionContext.Debug($"targetPath={targetPath}"); + executionContext.Debug($"sourceBranch={sourceBranch}"); + executionContext.Debug($"sourceVersion={sourceVersion}"); + executionContext.Debug($"clean={clean}"); + executionContext.Debug($"checkoutSubmodules={checkoutSubmodules}"); + executionContext.Debug($"checkoutNestedSubmodules={checkoutNestedSubmodules}"); + executionContext.Debug($"fetchDepth={fetchDepth}"); + executionContext.Debug($"gitLfsSupport={gitLfsSupport}"); + executionContext.Debug($"acceptUntrustedCerts={acceptUntrustedCerts}"); + +#if OS_WINDOWS + bool schannelSslBackend = StringUtil.ConvertToBoolean(executionContext.GetRunnerContext("gituseschannel")); + executionContext.Debug($"schannelSslBackend={schannelSslBackend}"); +#endif + + // Initialize git command manager with additional environment variables. + Dictionary gitEnv = new Dictionary(StringComparer.OrdinalIgnoreCase); + + // Disable prompting for git credential manager + gitEnv["GCM_INTERACTIVE"] = "Never"; + + // Git-lfs will try to pull down asset if any of the local/user/system setting exist. + // If customer didn't enable `LFS` in their pipeline definition, we will use ENV to disable LFS fetch/checkout. + if (!gitLfsSupport) + { + gitEnv["GIT_LFS_SKIP_SMUDGE"] = "1"; + } + + // Add the public variables. + foreach (var variable in executionContext.Variables) + { + // Add the variable using the formatted name. + string formattedKey = (variable.Key ?? string.Empty).Replace('.', '_').Replace(' ', '_').ToUpperInvariant(); + gitEnv[formattedKey] = variable.Value?.Value ?? 
string.Empty; + } + + GitCliManager gitCommandManager = new GitCliManager(gitEnv); + await gitCommandManager.LoadGitExecutionInfo(executionContext); + + // Make sure the build machine met all requirements for the git repository + // For now, the requirement we have are: + // 1. git version greater than 2.9 since we need to use auth header. + // 2. git-lfs version greater than 2.1 since we need to use auth header. + // 3. git version greater than 2.14.2 if use SChannel for SSL backend (Windows only) + RequirementCheck(executionContext, gitCommandManager, gitLfsSupport); + + // prepare credentail embedded urls + var runnerProxy = executionContext.GetProxyConfiguration(); + if (runnerProxy != null && !string.IsNullOrEmpty(runnerProxy.ProxyAddress) && !runnerProxy.WebProxy.IsBypassed(repositoryUrl)) + { + proxyUrlWithCred = UrlUtil.GetCredentialEmbeddedUrl(new Uri(runnerProxy.ProxyAddress), runnerProxy.ProxyUsername, runnerProxy.ProxyPassword); + + // uri.absoluteuri will not contains port info if the scheme is http/https and the port is 80/443 + // however, git.exe always require you provide port info, if nothing passed in, it will use 1080 as default + // as result, we need prefer the uri.originalstring when it's different than uri.absoluteuri. + if (string.Equals(proxyUrlWithCred.AbsoluteUri, proxyUrlWithCred.OriginalString, StringComparison.OrdinalIgnoreCase)) + { + proxyUrlWithCredString = proxyUrlWithCred.AbsoluteUri; + } + else + { + proxyUrlWithCredString = proxyUrlWithCred.OriginalString; + } + } + + // prepare askpass for client cert private key, if the repository's endpoint url match the runner config url + var systemConnection = executionContext.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); + if (runnerCert != null && Uri.Compare(repositoryUrl, systemConnection.Url, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) == 0) + { + if (!string.IsNullOrEmpty(runnerCert.CACertificateFile)) + { + useSelfSignedCACert = true; + } + + if (!string.IsNullOrEmpty(runnerCert.ClientCertificateFile) && + !string.IsNullOrEmpty(runnerCert.ClientCertificatePrivateKeyFile)) + { + useClientCert = true; + + // prepare askpass for client cert password + if (!string.IsNullOrEmpty(runnerCert.ClientCertificatePassword)) + { + clientCertPrivateKeyAskPassFile = Path.Combine(executionContext.GetRunnerContext("temp"), $"{Guid.NewGuid()}.sh"); + List askPass = new List(); + askPass.Add("#!/bin/sh"); + askPass.Add($"echo \"{runnerCert.ClientCertificatePassword}\""); + File.WriteAllLines(clientCertPrivateKeyAskPassFile, askPass); + +#if !OS_WINDOWS + string toolPath = WhichUtil.Which("chmod", true); + string argLine = $"775 {clientCertPrivateKeyAskPassFile}"; + executionContext.Command($"chmod {argLine}"); + + var processInvoker = new ProcessInvoker(executionContext); + processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs args) => + { + if (!string.IsNullOrEmpty(args.Data)) + { + executionContext.Output(args.Data); + } + }; + processInvoker.ErrorDataReceived += (object sender, ProcessDataReceivedEventArgs args) => + { + if (!string.IsNullOrEmpty(args.Data)) + { + executionContext.Output(args.Data); + } + }; + + string workingDirectory = executionContext.GetRunnerContext("workspace"); + await processInvoker.ExecuteAsync(workingDirectory, toolPath, argLine, null, true, CancellationToken.None); +#endif + } + } + } + + // Check the current contents of the root folder 
to see if there is already a repo + // If there is a repo, see if it matches the one we are expecting to be there based on the remote fetch url + // if the repo is not what we expect, remove the folder + if (!await IsRepositoryOriginUrlMatch(executionContext, gitCommandManager, targetPath, repositoryUrl)) + { + // Delete source folder + IOUtil.DeleteDirectory(targetPath, cancellationToken); + } + else + { + // delete the index.lock file left by previous canceled build or any operation cause git.exe crash last time. + string lockFile = Path.Combine(targetPath, ".git\\index.lock"); + if (File.Exists(lockFile)) + { + try + { + File.Delete(lockFile); + } + catch (Exception ex) + { + executionContext.Debug($"Unable to delete the index.lock file: {lockFile}"); + executionContext.Debug(ex.ToString()); + } + } + + // delete the shallow.lock file left by previous canceled build or any operation cause git.exe crash last time. + string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock"); + if (File.Exists(shallowLockFile)) + { + try + { + File.Delete(shallowLockFile); + } + catch (Exception ex) + { + executionContext.Debug($"Unable to delete the shallow.lock file: {shallowLockFile}"); + executionContext.Debug(ex.ToString()); + } + } + + // When repo.clean is selected for a git repo, execute git clean -ffdx and git reset --hard HEAD on the current repo. + // This will help us save the time to reclone the entire repo. + // If any git commands exit with non-zero return code or any exception happened during git.exe invoke, fall back to delete the repo folder. + if (clean) + { + Boolean softCleanSucceed = true; + + // git clean -ffdx + int exitCode_clean = await gitCommandManager.GitClean(executionContext, targetPath); + if (exitCode_clean != 0) + { + executionContext.Debug($"'git clean -ffdx' failed with exit code {exitCode_clean}, this normally caused by:\n 1) Path too long\n 2) Permission issue\n 3) File in use\nFor futher investigation, manually run 'git clean -ffdx' on repo root: {targetPath} after each build."); + softCleanSucceed = false; + } + + // git reset --hard HEAD + if (softCleanSucceed) + { + int exitCode_reset = await gitCommandManager.GitReset(executionContext, targetPath); + if (exitCode_reset != 0) + { + executionContext.Debug($"'git reset --hard HEAD' failed with exit code {exitCode_reset}\nFor futher investigation, manually run 'git reset --hard HEAD' on repo root: {targetPath} after each build."); + softCleanSucceed = false; + } + } + + // git clean -ffdx and git reset --hard HEAD for each submodule + if (checkoutSubmodules) + { + if (softCleanSucceed) + { + int exitCode_submoduleclean = await gitCommandManager.GitSubmoduleClean(executionContext, targetPath); + if (exitCode_submoduleclean != 0) + { + executionContext.Debug($"'git submodule foreach git clean -ffdx' failed with exit code {exitCode_submoduleclean}\nFor futher investigation, manually run 'git submodule foreach git clean -ffdx' on repo root: {targetPath} after each build."); + softCleanSucceed = false; + } + } + + if (softCleanSucceed) + { + int exitCode_submodulereset = await gitCommandManager.GitSubmoduleReset(executionContext, targetPath); + if (exitCode_submodulereset != 0) + { + executionContext.Debug($"'git submodule foreach git reset --hard HEAD' failed with exit code {exitCode_submodulereset}\nFor futher investigation, manually run 'git submodule foreach git reset --hard HEAD' on repo root: {targetPath} after each build."); + softCleanSucceed = false; + } + } + } + + if (!softCleanSucceed) + { + 
//fall back + executionContext.Warning("Unable to run \"git clean -ffdx\" and \"git reset --hard HEAD\" successfully, delete source folder instead."); + IOUtil.DeleteDirectory(targetPath, cancellationToken); + } + } + } + + // if the folder is missing, create it + if (!Directory.Exists(targetPath)) + { + Directory.CreateDirectory(targetPath); + } + + // if the folder contains a .git folder, it means the folder contains a git repo that matches the remote url and in a clean state. + // we will run git fetch to update the repo. + if (!Directory.Exists(Path.Combine(targetPath, ".git"))) + { + // init git repository + int exitCode_init = await gitCommandManager.GitInit(executionContext, targetPath); + if (exitCode_init != 0) + { + throw new InvalidOperationException($"Unable to use git.exe init repository under {targetPath}, 'git init' failed with exit code: {exitCode_init}"); + } + + int exitCode_addremote = await gitCommandManager.GitRemoteAdd(executionContext, targetPath, "origin", repositoryUrl.AbsoluteUri); + if (exitCode_addremote != 0) + { + throw new InvalidOperationException($"Unable to use git.exe add remote 'origin', 'git remote add' failed with exit code: {exitCode_addremote}"); + } + } + + cancellationToken.ThrowIfCancellationRequested(); + + // disable git auto gc + int exitCode_disableGC = await gitCommandManager.GitDisableAutoGC(executionContext, targetPath); + if (exitCode_disableGC != 0) + { + executionContext.Warning("Unable turn off git auto garbage collection, git fetch operation may trigger auto garbage collection which will affect the performance of fetching."); + } + + // always remove any possible left extraheader setting from git config. + if (await gitCommandManager.GitConfigExist(executionContext, targetPath, $"http.{repositoryUrl.AbsoluteUri}.extraheader")) + { + executionContext.Debug("Remove any extraheader setting from git config."); + await RemoveGitConfig(executionContext, gitCommandManager, targetPath, $"http.{repositoryUrl.AbsoluteUri}.extraheader", string.Empty); + } + + // always remove any possible left proxy setting from git config, the proxy setting may contains credential + if (await gitCommandManager.GitConfigExist(executionContext, targetPath, $"http.proxy")) + { + executionContext.Debug("Remove any proxy setting from git config."); + await RemoveGitConfig(executionContext, gitCommandManager, targetPath, $"http.proxy", string.Empty); + } + + List additionalFetchArgs = new List(); + List additionalLfsFetchArgs = new List(); + + // add accessToken as basic auth header to handle auth challenge. + if (!string.IsNullOrEmpty(accessToken)) + { + additionalFetchArgs.Add($"-c http.extraheader=\"AUTHORIZATION: {GenerateBasicAuthHeader(executionContext, accessToken)}\""); + } + + // Prepare proxy config for fetch. + if (runnerProxy != null && !string.IsNullOrEmpty(runnerProxy.ProxyAddress) && !runnerProxy.WebProxy.IsBypassed(repositoryUrl)) + { + executionContext.Debug($"Config proxy server '{runnerProxy.ProxyAddress}' for git fetch."); + ArgUtil.NotNullOrEmpty(proxyUrlWithCredString, nameof(proxyUrlWithCredString)); + additionalFetchArgs.Add($"-c http.proxy=\"{proxyUrlWithCredString}\""); + additionalLfsFetchArgs.Add($"-c http.proxy=\"{proxyUrlWithCredString}\""); + } + + // Prepare ignore ssl cert error config for fetch. + if (acceptUntrustedCerts) + { + additionalFetchArgs.Add($"-c http.sslVerify=false"); + additionalLfsFetchArgs.Add($"-c http.sslVerify=false"); + } + + // Prepare self-signed CA cert config for fetch from server. 
+ if (useSelfSignedCACert) + { + executionContext.Debug($"Use self-signed certificate '{runnerCert.CACertificateFile}' for git fetch."); + additionalFetchArgs.Add($"-c http.sslcainfo=\"{runnerCert.CACertificateFile}\""); + additionalLfsFetchArgs.Add($"-c http.sslcainfo=\"{runnerCert.CACertificateFile}\""); + } + + // Prepare client cert config for fetch from server. + if (useClientCert) + { + executionContext.Debug($"Use client certificate '{runnerCert.ClientCertificateFile}' for git fetch."); + + if (!string.IsNullOrEmpty(clientCertPrivateKeyAskPassFile)) + { + additionalFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\" -c http.sslCertPasswordProtected=true -c core.askpass=\"{clientCertPrivateKeyAskPassFile}\""); + additionalLfsFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\" -c http.sslCertPasswordProtected=true -c core.askpass=\"{clientCertPrivateKeyAskPassFile}\""); + } + else + { + additionalFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\""); + additionalLfsFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\""); + } + } + +#if OS_WINDOWS + if (schannelSslBackend) + { + executionContext.Debug("Use SChannel SslBackend for git fetch."); + additionalFetchArgs.Add("-c http.sslbackend=\"schannel\""); + additionalLfsFetchArgs.Add("-c http.sslbackend=\"schannel\""); + } +#endif + // Prepare gitlfs url for fetch and checkout + if (gitLfsSupport) + { + // Initialize git lfs by execute 'git lfs install' + executionContext.Debug("Setup the local Git hooks for Git LFS."); + int exitCode_lfsInstall = await gitCommandManager.GitLFSInstall(executionContext, targetPath); + if (exitCode_lfsInstall != 0) + { + throw new InvalidOperationException($"Git-lfs installation failed with exit code: {exitCode_lfsInstall}"); + } + + if (!string.IsNullOrEmpty(accessToken)) + { + string authorityUrl = repositoryUrl.AbsoluteUri.Replace(repositoryUrl.PathAndQuery, string.Empty); + additionalLfsFetchArgs.Add($"-c http.{authorityUrl}.extraheader=\"AUTHORIZATION: {GenerateBasicAuthHeader(executionContext, accessToken)}\""); + } + } + + List additionalFetchSpecs = new List(); + additionalFetchSpecs.Add("+refs/heads/*:refs/remotes/origin/*"); + + if (IsPullRequest(sourceBranch)) + { + additionalFetchSpecs.Add($"+{sourceBranch}:{GetRemoteRefName(sourceBranch)}"); + } + + int exitCode_fetch = await gitCommandManager.GitFetch(executionContext, targetPath, "origin", fetchDepth, additionalFetchSpecs, string.Join(" ", additionalFetchArgs), cancellationToken); + if (exitCode_fetch != 0) + { + throw new InvalidOperationException($"Git fetch failed with exit code: {exitCode_fetch}"); + } + + // Checkout + // sourceToBuild is used for checkout + // if sourceBranch is a PR branch or sourceVersion is null, make sure branch name is a remote branch. we need checkout to detached head. + // (change refs/heads to refs/remotes/origin, refs/pull to refs/remotes/pull, or leave it as it when the branch name doesn't contain refs/...) + // if sourceVersion provide, just use that for checkout, since when you checkout a commit, it will end up in detached head. 
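+            // For example (illustrative ref names): refs/heads/main maps to refs/remotes/origin/main,
+            // refs/pull/123/merge maps to refs/remotes/pull/123/merge, and an explicit commit SHA is
+            // checked out as-is, leaving the repository in a detached HEAD state.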
+ cancellationToken.ThrowIfCancellationRequested(); + string sourcesToBuild; + if (IsPullRequest(sourceBranch) || string.IsNullOrEmpty(sourceVersion)) + { + sourcesToBuild = GetRemoteRefName(sourceBranch); + } + else + { + sourcesToBuild = sourceVersion; + } + + // fetch lfs object upfront, this will avoid fetch lfs object during checkout which cause checkout taking forever + // since checkout will fetch lfs object 1 at a time, while git lfs fetch will fetch lfs object in parallel. + if (gitLfsSupport) + { + int exitCode_lfsFetch = await gitCommandManager.GitLFSFetch(executionContext, targetPath, "origin", sourcesToBuild, string.Join(" ", additionalLfsFetchArgs), cancellationToken); + if (exitCode_lfsFetch != 0) + { + // local repository is shallow repository, lfs fetch may fail due to lack of commits history. + // this will happen when the checkout commit is older than tip -> fetchDepth + if (fetchDepth > 0) + { + executionContext.Warning($"Git lfs fetch failed on shallow repository, this might because of git fetch with depth '{fetchDepth}' doesn't include the lfs fetch commit '{sourcesToBuild}'."); + } + + // git lfs fetch failed, get lfs log, the log is critical for debug. + int exitCode_lfsLogs = await gitCommandManager.GitLFSLogs(executionContext, targetPath); + throw new InvalidOperationException($"Git lfs fetch failed with exit code: {exitCode_lfsFetch}. Git lfs logs returned with exit code: {exitCode_lfsLogs}."); + } + } + + // Finally, checkout the sourcesToBuild (if we didn't find a valid git object this will throw) + int exitCode_checkout = await gitCommandManager.GitCheckout(executionContext, targetPath, sourcesToBuild, cancellationToken); + if (exitCode_checkout != 0) + { + // local repository is shallow repository, checkout may fail due to lack of commits history. + // this will happen when the checkout commit is older than tip -> fetchDepth + if (fetchDepth > 0) + { + executionContext.Warning($"Git checkout failed on shallow repository, this might because of git fetch with depth '{fetchDepth}' doesn't include the checkout commit '{sourcesToBuild}'."); + } + + throw new InvalidOperationException($"Git checkout failed with exit code: {exitCode_checkout}"); + } + + // Submodule update + if (checkoutSubmodules) + { + cancellationToken.ThrowIfCancellationRequested(); + + int exitCode_submoduleSync = await gitCommandManager.GitSubmoduleSync(executionContext, targetPath, checkoutNestedSubmodules, cancellationToken); + if (exitCode_submoduleSync != 0) + { + throw new InvalidOperationException($"Git submodule sync failed with exit code: {exitCode_submoduleSync}"); + } + + List additionalSubmoduleUpdateArgs = new List(); + + if (!string.IsNullOrEmpty(accessToken)) + { + string authorityUrl = repositoryUrl.AbsoluteUri.Replace(repositoryUrl.PathAndQuery, string.Empty); + additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.extraheader=\"AUTHORIZATION: {GenerateBasicAuthHeader(executionContext, accessToken)}\""); + } + + // Prepare proxy config for submodule update. + if (runnerProxy != null && !string.IsNullOrEmpty(runnerProxy.ProxyAddress) && !runnerProxy.WebProxy.IsBypassed(repositoryUrl)) + { + executionContext.Debug($"Config proxy server '{runnerProxy.ProxyAddress}' for git submodule update."); + ArgUtil.NotNullOrEmpty(proxyUrlWithCredString, nameof(proxyUrlWithCredString)); + additionalSubmoduleUpdateArgs.Add($"-c http.proxy=\"{proxyUrlWithCredString}\""); + } + + // Prepare ignore ssl cert error config for fetch. 
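+                // (Passed per invocation via -c, so http.sslVerify=false is not persisted into the repository's .git/config.)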
+ if (acceptUntrustedCerts) + { + additionalSubmoduleUpdateArgs.Add($"-c http.sslVerify=false"); + } + + // Prepare self-signed CA cert config for submodule update. + if (useSelfSignedCACert) + { + executionContext.Debug($"Use self-signed CA certificate '{runnerCert.CACertificateFile}' for git submodule update."); + string authorityUrl = repositoryUrl.AbsoluteUri.Replace(repositoryUrl.PathAndQuery, string.Empty); + additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.sslcainfo=\"{runnerCert.CACertificateFile}\""); + } + + // Prepare client cert config for submodule update. + if (useClientCert) + { + executionContext.Debug($"Use client certificate '{runnerCert.ClientCertificateFile}' for git submodule update."); + string authorityUrl = repositoryUrl.AbsoluteUri.Replace(repositoryUrl.PathAndQuery, string.Empty); + + if (!string.IsNullOrEmpty(clientCertPrivateKeyAskPassFile)) + { + additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.{authorityUrl}.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\" -c http.{authorityUrl}.sslCertPasswordProtected=true -c core.askpass=\"{clientCertPrivateKeyAskPassFile}\""); + } + else + { + additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.{authorityUrl}.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\""); + } + } +#if OS_WINDOWS + if (schannelSslBackend) + { + executionContext.Debug("Use SChannel SslBackend for git submodule update."); + additionalSubmoduleUpdateArgs.Add("-c http.sslbackend=\"schannel\""); + } +#endif + + int exitCode_submoduleUpdate = await gitCommandManager.GitSubmoduleUpdate(executionContext, targetPath, fetchDepth, string.Join(" ", additionalSubmoduleUpdateArgs), checkoutNestedSubmodules, cancellationToken); + if (exitCode_submoduleUpdate != 0) + { + throw new InvalidOperationException($"Git submodule update failed with exit code: {exitCode_submoduleUpdate}"); + } + } + + if (useClientCert && !string.IsNullOrEmpty(clientCertPrivateKeyAskPassFile)) + { + executionContext.Debug("Remove git.sslkey askpass file."); + IOUtil.DeleteFile(clientCertPrivateKeyAskPassFile); + } + } + + private async Task IsRepositoryOriginUrlMatch(RunnerActionPluginExecutionContext context, GitCliManager gitCommandManager, string repositoryPath, Uri expectedRepositoryOriginUrl) + { + context.Debug($"Checking if the repo on {repositoryPath} matches the expected repository origin URL. expected Url: {expectedRepositoryOriginUrl.AbsoluteUri}"); + if (!Directory.Exists(Path.Combine(repositoryPath, ".git"))) + { + // There is no repo directory + context.Debug($"Repository is not found since '.git' directory does not exist under. {repositoryPath}"); + return false; + } + + Uri remoteUrl; + remoteUrl = await gitCommandManager.GitGetFetchUrl(context, repositoryPath); + + if (remoteUrl == null) + { + // origin fetch url not found. 
+ context.Debug("Repository remote origin fetch url is empty."); + return false; + } + + context.Debug($"Repository remote origin fetch url is {remoteUrl}"); + // compare the url passed in with the remote url found + if (expectedRepositoryOriginUrl.Equals(remoteUrl)) + { + context.Debug("URLs match."); + return true; + } + else + { + context.Debug($"The remote.origin.url of the repository under root folder '{repositoryPath}' doesn't matches source repository url."); + return false; + } + } + + private async Task RemoveGitConfig(RunnerActionPluginExecutionContext executionContext, GitCliManager gitCommandManager, string targetPath, string configKey, string configValue) + { + int exitCode_configUnset = await gitCommandManager.GitConfigUnset(executionContext, targetPath, configKey); + if (exitCode_configUnset != 0) + { + // if unable to use git.exe unset http.extraheader, http.proxy or core.askpass, modify git config file on disk. make sure we don't left credential. + if (!string.IsNullOrEmpty(configValue)) + { + executionContext.Warning("An unsuccessful attempt was made using git command line to remove \"http.extraheader\" from the git config. Attempting to modify the git config file directly to remove the credential."); + string gitConfig = Path.Combine(targetPath, ".git/config"); + if (File.Exists(gitConfig)) + { + string gitConfigContent = File.ReadAllText(Path.Combine(targetPath, ".git", "config")); + if (gitConfigContent.Contains(configKey)) + { + string setting = $"extraheader = {configValue}"; + gitConfigContent = Regex.Replace(gitConfigContent, setting, string.Empty, RegexOptions.IgnoreCase); + + setting = $"proxy = {configValue}"; + gitConfigContent = Regex.Replace(gitConfigContent, setting, string.Empty, RegexOptions.IgnoreCase); + + setting = $"askpass = {configValue}"; + gitConfigContent = Regex.Replace(gitConfigContent, setting, string.Empty, RegexOptions.IgnoreCase); + + File.WriteAllText(gitConfig, gitConfigContent); + } + } + } + else + { + executionContext.Warning($"Unable to remove \"{configKey}\" from the git config. 
To remove the credential, execute \"git config --unset - all {configKey}\" from the repository root \"{targetPath}\"."); + } + } + } + + private bool IsPullRequest(string sourceBranch) + { + return !string.IsNullOrEmpty(sourceBranch) && + (sourceBranch.StartsWith(_pullRefsPrefix, StringComparison.OrdinalIgnoreCase) || + sourceBranch.StartsWith(_remotePullRefsPrefix, StringComparison.OrdinalIgnoreCase)); + } + + private string GetRemoteRefName(string refName) + { + if (string.IsNullOrEmpty(refName)) + { + // If the refName is empty return the remote name for master + refName = _remoteRefsPrefix + "master"; + } + else if (refName.Equals("master", StringComparison.OrdinalIgnoreCase)) + { + // If the refName is master return the remote name for master + refName = _remoteRefsPrefix + refName; + } + else if (refName.StartsWith(_refsPrefix, StringComparison.OrdinalIgnoreCase)) + { + // If the refName is refs/heads change it to the remote version of the name + refName = _remoteRefsPrefix + refName.Substring(_refsPrefix.Length); + } + else if (refName.StartsWith(_pullRefsPrefix, StringComparison.OrdinalIgnoreCase)) + { + // If the refName is refs/pull change it to the remote version of the name + refName = refName.Replace(_pullRefsPrefix, _remotePullRefsPrefix); + } + + return refName; + } + } +} diff --git a/src/Runner.Plugins/Repository/v1.0/RepositoryPlugin.cs b/src/Runner.Plugins/Repository/v1.0/RepositoryPlugin.cs new file mode 100644 index 00000000000..a216ac306ad --- /dev/null +++ b/src/Runner.Plugins/Repository/v1.0/RepositoryPlugin.cs @@ -0,0 +1,175 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Runner.Sdk; +using Pipelines = GitHub.DistributedTask.Pipelines; +using System.IO; +using GitHub.DistributedTask.Pipelines.ContextData; +using System.Text.RegularExpressions; +using GitHub.DistributedTask.Pipelines.Expressions; +using System.Text; + +namespace GitHub.Runner.Plugins.Repository.v1_0 +{ + public class CheckoutTask : IRunnerActionPlugin + { + private readonly Regex _validSha1 = new Regex(@"\b[0-9a-f]{40}\b", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled, TimeSpan.FromSeconds(2)); + + public async Task RunAsync(RunnerActionPluginExecutionContext executionContext, CancellationToken token) + { + string runnerWorkspace = executionContext.GetRunnerContext("workspace"); + ArgUtil.Directory(runnerWorkspace, nameof(runnerWorkspace)); + string tempDirectory = executionContext.GetRunnerContext("temp"); + ArgUtil.Directory(tempDirectory, nameof(tempDirectory)); + + var repoFullName = executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Repository); + if (string.IsNullOrEmpty(repoFullName)) + { + repoFullName = executionContext.GetGitHubContext("repository"); + } + + var repoFullNameSplit = repoFullName.Split("/", StringSplitOptions.RemoveEmptyEntries); + if (repoFullNameSplit.Length != 2) + { + throw new ArgumentOutOfRangeException(repoFullName); + } + + string expectRepoPath; + var path = executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Path); + if (!string.IsNullOrEmpty(path)) + { + expectRepoPath = IOUtil.ResolvePath(runnerWorkspace, path); + if (!expectRepoPath.StartsWith(runnerWorkspace.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar) + Path.DirectorySeparatorChar)) + { + throw new ArgumentException($"Input path '{path}' should resolve to a directory under '{runnerWorkspace}', current resolved path 
'{expectRepoPath}'."); + } + } + else + { + // When repository doesn't has path set, default to sources directory 1/repoName + expectRepoPath = Path.Combine(runnerWorkspace, repoFullNameSplit[1]); + } + + var workspaceRepo = executionContext.GetGitHubContext("repository"); + // for self repository, we need to let the worker knows where it is after checkout. + if (string.Equals(workspaceRepo, repoFullName, StringComparison.OrdinalIgnoreCase)) + { + var workspaceRepoPath = executionContext.GetGitHubContext("workspace"); + + executionContext.Debug($"Repository requires to be placed at '{expectRepoPath}', current location is '{workspaceRepoPath}'"); + if (!string.Equals(workspaceRepoPath.Trim(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar), expectRepoPath.Trim(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar), IOUtil.FilePathStringComparison)) + { + executionContext.Output($"Repository is current at '{workspaceRepoPath}', move to '{expectRepoPath}'."); + var count = 1; + var staging = Path.Combine(tempDirectory, $"_{count}"); + while (Directory.Exists(staging)) + { + count++; + staging = Path.Combine(tempDirectory, $"_{count}"); + } + + try + { + executionContext.Debug($"Move existing repository '{workspaceRepoPath}' to '{expectRepoPath}' via staging directory '{staging}'."); + IOUtil.MoveDirectory(workspaceRepoPath, expectRepoPath, staging, CancellationToken.None); + } + catch (Exception ex) + { + executionContext.Debug("Catch exception during repository move."); + executionContext.Debug(ex.ToString()); + executionContext.Warning("Unable move and reuse existing repository to required location."); + IOUtil.DeleteDirectory(expectRepoPath, CancellationToken.None); + } + + executionContext.Output($"Repository will locate at '{expectRepoPath}'."); + } + + executionContext.Debug($"Update workspace repository location."); + executionContext.SetRepositoryPath(repoFullName, expectRepoPath, true); + } + + string sourceBranch; + string sourceVersion; + string refInput = executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Ref); + if (string.IsNullOrEmpty(refInput)) + { + sourceBranch = executionContext.GetGitHubContext("ref"); + sourceVersion = executionContext.GetGitHubContext("sha"); + } + else + { + sourceBranch = refInput; + sourceVersion = executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Version); // version get removed when checkout move to repo in the graph + if (string.IsNullOrEmpty(sourceVersion) && RegexUtility.IsMatch(sourceBranch, WellKnownRegularExpressions.SHA1)) + { + sourceVersion = sourceBranch; + + // If Ref is a SHA and the repo is self, we need to use github.ref as source branch since it might be refs/pull/* + if (string.Equals(workspaceRepo, repoFullName, StringComparison.OrdinalIgnoreCase)) + { + sourceBranch = executionContext.GetGitHubContext("ref"); + } + else + { + sourceBranch = "refs/heads/master"; + } + } + } + + bool clean = StringUtil.ConvertToBoolean(executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Clean), true); + string submoduleInput = executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Submodules); + + int fetchDepth = 0; + if (!int.TryParse(executionContext.GetInput("fetch-depth"), out fetchDepth) || fetchDepth < 0) + { + fetchDepth = 0; + } + + bool gitLfsSupport = StringUtil.ConvertToBoolean(executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Lfs)); + string accessToken = 
executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Token); + if (string.IsNullOrEmpty(accessToken)) + { + accessToken = executionContext.GetGitHubContext("token"); + } + + // register problem matcher + string problemMatcher = @" +{ + ""problemMatcher"": [ + { + ""owner"": ""checkout-git"", + ""pattern"": [ + { + ""regexp"": ""^fatal: (.*)$"", + ""message"": 1 + } + ] + } + ] +}"; + string matcherFile = Path.Combine(tempDirectory, $"git_{Guid.NewGuid()}.json"); + File.WriteAllText(matcherFile, problemMatcher, new UTF8Encoding(false)); + executionContext.Output($"##[add-matcher]{matcherFile}"); + try + { + await new GitHubSourceProvider().GetSourceAsync(executionContext, + expectRepoPath, + repoFullName, + sourceBranch, + sourceVersion, + clean, + submoduleInput, + fetchDepth, + gitLfsSupport, + accessToken, + token); + } + finally + { + executionContext.Output("##[remove-matcher owner=checkout-git]"); + } + } + } +} diff --git a/src/Runner.Plugins/Repository/v1.1/GitSourceProvider.cs b/src/Runner.Plugins/Repository/v1.1/GitSourceProvider.cs new file mode 100644 index 00000000000..09492bcda9d --- /dev/null +++ b/src/Runner.Plugins/Repository/v1.1/GitSourceProvider.cs @@ -0,0 +1,740 @@ +using Pipelines = GitHub.DistributedTask.Pipelines; +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using System.IO; +using System.Text.RegularExpressions; +using System.Text; +using System.Diagnostics; +using GitHub.Runner.Sdk; +using System.Linq; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.WebApi; + +namespace GitHub.Runner.Plugins.Repository.v1_1 +{ + public sealed class GitHubSourceProvider + { + // refs prefix + private const string _refsPrefix = "refs/heads/"; + private const string _remoteRefsPrefix = "refs/remotes/origin/"; + private const string _pullRefsPrefix = "refs/pull/"; + private const string _remotePullRefsPrefix = "refs/remotes/pull/"; + private const string _tagRefsPrefix = "refs/tags/"; + + // min git version that support add extra auth header. + private Version _minGitVersionSupportAuthHeader = new Version(2, 9); + +#if OS_WINDOWS + // min git version that support override sslBackend setting. + private Version _minGitVersionSupportSSLBackendOverride = new Version(2, 14, 2); +#endif + + // min git-lfs version that support add extra auth header. + private Version _minGitLfsVersionSupportAuthHeader = new Version(2, 1); + + public static string ProblemMatcher => @" +{ + ""problemMatcher"": [ + { + ""owner"": ""checkout-git"", + ""pattern"": [ + { + ""regexp"": ""^(fatal|error): (.*)$"", + ""message"": 2 + } + ] + } + ] +}"; + + public async Task GetSourceAsync( + RunnerActionPluginExecutionContext executionContext, + string repositoryPath, + string repoFullName, + string sourceBranch, + string sourceVersion, + bool clean, + string submoduleInput, + int fetchDepth, + bool gitLfsSupport, + string accessToken, + CancellationToken cancellationToken) + { + // Validate args. 
+ ArgUtil.NotNull(executionContext, nameof(executionContext)); + Dictionary configModifications = new Dictionary(); + Uri proxyUrlWithCred = null; + string proxyUrlWithCredString = null; + bool useSelfSignedCACert = false; + bool useClientCert = false; + string clientCertPrivateKeyAskPassFile = null; + bool acceptUntrustedCerts = false; + + executionContext.Output($"Syncing repository: {repoFullName}"); + Uri repositoryUrl = new Uri($"https://github.com/{repoFullName}"); + if (!repositoryUrl.IsAbsoluteUri) + { + throw new InvalidOperationException("Repository url need to be an absolute uri."); + } + + string targetPath = repositoryPath; + + // input Submodules can be ['', true, false, recursive] + // '' or false indicate don't checkout submodules + // true indicate checkout top level submodules + // recursive indicate checkout submodules recursively + bool checkoutSubmodules = false; + bool checkoutNestedSubmodules = false; + if (!string.IsNullOrEmpty(submoduleInput)) + { + if (string.Equals(submoduleInput, Pipelines.PipelineConstants.CheckoutTaskInputs.SubmodulesOptions.Recursive, StringComparison.OrdinalIgnoreCase)) + { + checkoutSubmodules = true; + checkoutNestedSubmodules = true; + } + else + { + checkoutSubmodules = StringUtil.ConvertToBoolean(submoduleInput); + } + } + + var runnerCert = executionContext.GetCertConfiguration(); + acceptUntrustedCerts = runnerCert?.SkipServerCertificateValidation ?? false; + + executionContext.Debug($"repository url={repositoryUrl}"); + executionContext.Debug($"targetPath={targetPath}"); + executionContext.Debug($"sourceBranch={sourceBranch}"); + executionContext.Debug($"sourceVersion={sourceVersion}"); + executionContext.Debug($"clean={clean}"); + executionContext.Debug($"checkoutSubmodules={checkoutSubmodules}"); + executionContext.Debug($"checkoutNestedSubmodules={checkoutNestedSubmodules}"); + executionContext.Debug($"fetchDepth={fetchDepth}"); + executionContext.Debug($"gitLfsSupport={gitLfsSupport}"); + executionContext.Debug($"acceptUntrustedCerts={acceptUntrustedCerts}"); + +#if OS_WINDOWS + bool schannelSslBackend = StringUtil.ConvertToBoolean(executionContext.GetRunnerContext("gituseschannel")); + executionContext.Debug($"schannelSslBackend={schannelSslBackend}"); +#endif + + // Initialize git command manager with additional environment variables. + Dictionary gitEnv = new Dictionary(StringComparer.OrdinalIgnoreCase); + + // Disable git prompt + gitEnv["GIT_TERMINAL_PROMPT"] = "0"; + + // Disable prompting for git credential manager + gitEnv["GCM_INTERACTIVE"] = "Never"; + + // Git-lfs will try to pull down asset if any of the local/user/system setting exist. + // If customer didn't enable `LFS` in their pipeline definition, we will use ENV to disable LFS fetch/checkout. + if (!gitLfsSupport) + { + gitEnv["GIT_LFS_SKIP_SMUDGE"] = "1"; + } + + // Add the public variables. + foreach (var variable in executionContext.Variables) + { + // Add the variable using the formatted name. + string formattedKey = (variable.Key ?? string.Empty).Replace('.', '_').Replace(' ', '_').ToUpperInvariant(); + gitEnv[formattedKey] = variable.Value?.Value ?? string.Empty; + } + + GitCliManager gitCommandManager = new GitCliManager(gitEnv); + await gitCommandManager.LoadGitExecutionInfo(executionContext); + + // Make sure the build machine met all requirements for the git repository + // For now, the requirement we have are: + // 1. git version greater than 2.9 since we need to use auth header. + // 2. 
git-lfs version greater than 2.1 since we need to use auth header. + // 3. git version greater than 2.14.2 if use SChannel for SSL backend (Windows only) + RequirementCheck(executionContext, gitCommandManager, gitLfsSupport); + + // prepare credentail embedded urls + var runnerProxy = executionContext.GetProxyConfiguration(); + if (runnerProxy != null && !string.IsNullOrEmpty(runnerProxy.ProxyAddress) && !runnerProxy.WebProxy.IsBypassed(repositoryUrl)) + { + proxyUrlWithCred = UrlUtil.GetCredentialEmbeddedUrl(new Uri(runnerProxy.ProxyAddress), runnerProxy.ProxyUsername, runnerProxy.ProxyPassword); + + // uri.absoluteuri will not contains port info if the scheme is http/https and the port is 80/443 + // however, git.exe always require you provide port info, if nothing passed in, it will use 1080 as default + // as result, we need prefer the uri.originalstring over uri.absoluteuri. + proxyUrlWithCredString = proxyUrlWithCred.OriginalString; + } + + // prepare askpass for client cert private key, if the repository's endpoint url match the runner config url + var systemConnection = executionContext.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); + if (runnerCert != null && Uri.Compare(repositoryUrl, systemConnection.Url, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) == 0) + { + if (!string.IsNullOrEmpty(runnerCert.CACertificateFile)) + { + useSelfSignedCACert = true; + } + + if (!string.IsNullOrEmpty(runnerCert.ClientCertificateFile) && + !string.IsNullOrEmpty(runnerCert.ClientCertificatePrivateKeyFile)) + { + useClientCert = true; + + // prepare askpass for client cert password + if (!string.IsNullOrEmpty(runnerCert.ClientCertificatePassword)) + { + clientCertPrivateKeyAskPassFile = Path.Combine(executionContext.GetRunnerContext("temp"), $"{Guid.NewGuid()}.sh"); + List askPass = new List(); + askPass.Add("#!/bin/sh"); + askPass.Add($"echo \"{runnerCert.ClientCertificatePassword}\""); + File.WriteAllLines(clientCertPrivateKeyAskPassFile, askPass); + +#if !OS_WINDOWS + string toolPath = WhichUtil.Which("chmod", true); + string argLine = $"775 {clientCertPrivateKeyAskPassFile}"; + executionContext.Command($"chmod {argLine}"); + + var processInvoker = new ProcessInvoker(executionContext); + processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs args) => + { + if (!string.IsNullOrEmpty(args.Data)) + { + executionContext.Output(args.Data); + } + }; + processInvoker.ErrorDataReceived += (object sender, ProcessDataReceivedEventArgs args) => + { + if (!string.IsNullOrEmpty(args.Data)) + { + executionContext.Output(args.Data); + } + }; + + string workingDirectory = executionContext.GetRunnerContext("workspace"); + await processInvoker.ExecuteAsync(workingDirectory, toolPath, argLine, null, true, CancellationToken.None); +#endif + } + } + } + + // Check the current contents of the root folder to see if there is already a repo + // If there is a repo, see if it matches the one we are expecting to be there based on the remote fetch url + // if the repo is not what we expect, remove the folder + if (!await IsRepositoryOriginUrlMatch(executionContext, gitCommandManager, targetPath, repositoryUrl)) + { + // Delete source folder + IOUtil.DeleteDirectory(targetPath, cancellationToken); + } + else + { + // delete the index.lock file left by previous canceled build or any operation cause git.exe crash last time. 
+ string lockFile = Path.Combine(targetPath, ".git\\index.lock"); + if (File.Exists(lockFile)) + { + try + { + File.Delete(lockFile); + } + catch (Exception ex) + { + executionContext.Debug($"Unable to delete the index.lock file: {lockFile}"); + executionContext.Debug(ex.ToString()); + } + } + + // delete the shallow.lock file left by previous canceled build or any operation cause git.exe crash last time. + string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock"); + if (File.Exists(shallowLockFile)) + { + try + { + File.Delete(shallowLockFile); + } + catch (Exception ex) + { + executionContext.Debug($"Unable to delete the shallow.lock file: {shallowLockFile}"); + executionContext.Debug(ex.ToString()); + } + } + + // When repo.clean is selected for a git repo, execute git clean -ffdx and git reset --hard HEAD on the current repo. + // This will help us save the time to reclone the entire repo. + // If any git commands exit with non-zero return code or any exception happened during git.exe invoke, fall back to delete the repo folder. + if (clean) + { + Boolean softCleanSucceed = true; + + // git clean -ffdx + int exitCode_clean = await gitCommandManager.GitClean(executionContext, targetPath); + if (exitCode_clean != 0) + { + executionContext.Debug($"'git clean -ffdx' failed with exit code {exitCode_clean}, this normally caused by:\n 1) Path too long\n 2) Permission issue\n 3) File in use\nFor futher investigation, manually run 'git clean -ffdx' on repo root: {targetPath} after each build."); + softCleanSucceed = false; + } + + // git reset --hard HEAD + if (softCleanSucceed) + { + int exitCode_reset = await gitCommandManager.GitReset(executionContext, targetPath); + if (exitCode_reset != 0) + { + executionContext.Debug($"'git reset --hard HEAD' failed with exit code {exitCode_reset}\nFor futher investigation, manually run 'git reset --hard HEAD' on repo root: {targetPath} after each build."); + softCleanSucceed = false; + } + } + + // git clean -ffdx and git reset --hard HEAD for each submodule + if (checkoutSubmodules) + { + if (softCleanSucceed) + { + int exitCode_submoduleclean = await gitCommandManager.GitSubmoduleClean(executionContext, targetPath); + if (exitCode_submoduleclean != 0) + { + executionContext.Debug($"'git submodule foreach git clean -ffdx' failed with exit code {exitCode_submoduleclean}\nFor futher investigation, manually run 'git submodule foreach git clean -ffdx' on repo root: {targetPath} after each build."); + softCleanSucceed = false; + } + } + + if (softCleanSucceed) + { + int exitCode_submodulereset = await gitCommandManager.GitSubmoduleReset(executionContext, targetPath); + if (exitCode_submodulereset != 0) + { + executionContext.Debug($"'git submodule foreach git reset --hard HEAD' failed with exit code {exitCode_submodulereset}\nFor futher investigation, manually run 'git submodule foreach git reset --hard HEAD' on repo root: {targetPath} after each build."); + softCleanSucceed = false; + } + } + } + + if (!softCleanSucceed) + { + //fall back + executionContext.Warning("Unable to run \"git clean -ffdx\" and \"git reset --hard HEAD\" successfully, delete source folder instead."); + IOUtil.DeleteDirectory(targetPath, cancellationToken); + } + } + } + + // if the folder is missing, create it + if (!Directory.Exists(targetPath)) + { + Directory.CreateDirectory(targetPath); + } + + // if the folder contains a .git folder, it means the folder contains a git repo that matches the remote url and in a clean state. 
+ // we will run git fetch to update the repo. + if (!Directory.Exists(Path.Combine(targetPath, ".git"))) + { + // init git repository + int exitCode_init = await gitCommandManager.GitInit(executionContext, targetPath); + if (exitCode_init != 0) + { + throw new InvalidOperationException($"Unable to use git.exe init repository under {targetPath}, 'git init' failed with exit code: {exitCode_init}"); + } + + int exitCode_addremote = await gitCommandManager.GitRemoteAdd(executionContext, targetPath, "origin", repositoryUrl.AbsoluteUri); + if (exitCode_addremote != 0) + { + throw new InvalidOperationException($"Unable to use git.exe add remote 'origin', 'git remote add' failed with exit code: {exitCode_addremote}"); + } + } + + cancellationToken.ThrowIfCancellationRequested(); + + // disable git auto gc + int exitCode_disableGC = await gitCommandManager.GitDisableAutoGC(executionContext, targetPath); + if (exitCode_disableGC != 0) + { + executionContext.Warning("Unable turn off git auto garbage collection, git fetch operation may trigger auto garbage collection which will affect the performance of fetching."); + } + + // always remove any possible left extraheader setting from git config. + if (await gitCommandManager.GitConfigExist(executionContext, targetPath, $"http.{repositoryUrl.AbsoluteUri}.extraheader")) + { + executionContext.Debug("Remove any extraheader setting from git config."); + await RemoveGitConfig(executionContext, gitCommandManager, targetPath, $"http.{repositoryUrl.AbsoluteUri}.extraheader", string.Empty); + } + + // always remove any possible left proxy setting from git config, the proxy setting may contains credential + if (await gitCommandManager.GitConfigExist(executionContext, targetPath, $"http.proxy")) + { + executionContext.Debug("Remove any proxy setting from git config."); + await RemoveGitConfig(executionContext, gitCommandManager, targetPath, $"http.proxy", string.Empty); + } + + List additionalFetchArgs = new List(); + List additionalLfsFetchArgs = new List(); + + // Add http.https://github.com.extraheader=... to gitconfig + // accessToken as basic auth header to handle any auth challenge from github.com + string configKey = $"http.https://github.com/.extraheader"; + string configValue = $"\"AUTHORIZATION: {GenerateBasicAuthHeader(executionContext, accessToken)}\""; + configModifications[configKey] = configValue.Trim('\"'); + int exitCode_config = await gitCommandManager.GitConfig(executionContext, targetPath, configKey, configValue); + if (exitCode_config != 0) + { + throw new InvalidOperationException($"Git config failed with exit code: {exitCode_config}"); + } + + // Prepare proxy config for fetch. + if (runnerProxy != null && !string.IsNullOrEmpty(runnerProxy.ProxyAddress) && !runnerProxy.WebProxy.IsBypassed(repositoryUrl)) + { + executionContext.Debug($"Config proxy server '{runnerProxy.ProxyAddress}' for git fetch."); + ArgUtil.NotNullOrEmpty(proxyUrlWithCredString, nameof(proxyUrlWithCredString)); + additionalFetchArgs.Add($"-c http.proxy=\"{proxyUrlWithCredString}\""); + additionalLfsFetchArgs.Add($"-c http.proxy=\"{proxyUrlWithCredString}\""); + } + + // Prepare ignore ssl cert error config for fetch. + if (acceptUntrustedCerts) + { + additionalFetchArgs.Add($"-c http.sslVerify=false"); + additionalLfsFetchArgs.Add($"-c http.sslVerify=false"); + } + + // Prepare self-signed CA cert config for fetch from server. 
+ if (useSelfSignedCACert) + { + executionContext.Debug($"Use self-signed certificate '{runnerCert.CACertificateFile}' for git fetch."); + additionalFetchArgs.Add($"-c http.sslcainfo=\"{runnerCert.CACertificateFile}\""); + additionalLfsFetchArgs.Add($"-c http.sslcainfo=\"{runnerCert.CACertificateFile}\""); + } + + // Prepare client cert config for fetch from server. + if (useClientCert) + { + executionContext.Debug($"Use client certificate '{runnerCert.ClientCertificateFile}' for git fetch."); + + if (!string.IsNullOrEmpty(clientCertPrivateKeyAskPassFile)) + { + additionalFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\" -c http.sslCertPasswordProtected=true -c core.askpass=\"{clientCertPrivateKeyAskPassFile}\""); + additionalLfsFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\" -c http.sslCertPasswordProtected=true -c core.askpass=\"{clientCertPrivateKeyAskPassFile}\""); + } + else + { + additionalFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\""); + additionalLfsFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\""); + } + } + +#if OS_WINDOWS + if (schannelSslBackend) + { + executionContext.Debug("Use SChannel SslBackend for git fetch."); + additionalFetchArgs.Add("-c http.sslbackend=\"schannel\""); + additionalLfsFetchArgs.Add("-c http.sslbackend=\"schannel\""); + } +#endif + // Prepare gitlfs url for fetch and checkout + if (gitLfsSupport) + { + // Initialize git lfs by execute 'git lfs install' + executionContext.Debug("Setup the local Git hooks for Git LFS."); + int exitCode_lfsInstall = await gitCommandManager.GitLFSInstall(executionContext, targetPath); + if (exitCode_lfsInstall != 0) + { + throw new InvalidOperationException($"Git-lfs installation failed with exit code: {exitCode_lfsInstall}"); + } + } + + List additionalFetchSpecs = new List(); + additionalFetchSpecs.Add("+refs/heads/*:refs/remotes/origin/*"); + + if (IsPullRequest(sourceBranch)) + { + additionalFetchSpecs.Add($"+{sourceBranch}:{GetRemoteRefName(sourceBranch)}"); + } + + int exitCode_fetch = await gitCommandManager.GitFetch(executionContext, targetPath, "origin", fetchDepth, additionalFetchSpecs, string.Join(" ", additionalFetchArgs), cancellationToken); + if (exitCode_fetch != 0) + { + throw new InvalidOperationException($"Git fetch failed with exit code: {exitCode_fetch}"); + } + + // Checkout + // sourceToBuild is used for checkout + // if sourceBranch is a PR branch or sourceVersion is null, make sure branch name is a remote branch. we need checkout to detached head. + // (change refs/heads to refs/remotes/origin, refs/pull to refs/remotes/pull, or leave it as it when the branch name doesn't contain refs/...) + // if sourceVersion provide, just use that for checkout, since when you checkout a commit, it will end up in detached head. 
+ cancellationToken.ThrowIfCancellationRequested(); + string sourcesToBuild; + if (IsPullRequest(sourceBranch) || string.IsNullOrEmpty(sourceVersion)) + { + sourcesToBuild = GetRemoteRefName(sourceBranch); + } + else + { + sourcesToBuild = sourceVersion; + } + + // fetch lfs object upfront, this will avoid fetch lfs object during checkout which cause checkout taking forever + // since checkout will fetch lfs object 1 at a time, while git lfs fetch will fetch lfs object in parallel. + if (gitLfsSupport) + { + int exitCode_lfsFetch = await gitCommandManager.GitLFSFetch(executionContext, targetPath, "origin", sourcesToBuild, string.Join(" ", additionalLfsFetchArgs), cancellationToken); + if (exitCode_lfsFetch != 0) + { + // local repository is shallow repository, lfs fetch may fail due to lack of commits history. + // this will happen when the checkout commit is older than tip -> fetchDepth + if (fetchDepth > 0) + { + executionContext.Warning($"Git lfs fetch failed on shallow repository, this might because of git fetch with depth '{fetchDepth}' doesn't include the lfs fetch commit '{sourcesToBuild}'."); + } + + // git lfs fetch failed, get lfs log, the log is critical for debug. + int exitCode_lfsLogs = await gitCommandManager.GitLFSLogs(executionContext, targetPath); + throw new InvalidOperationException($"Git lfs fetch failed with exit code: {exitCode_lfsFetch}. Git lfs logs returned with exit code: {exitCode_lfsLogs}."); + } + } + + // Finally, checkout the sourcesToBuild (if we didn't find a valid git object this will throw) + int exitCode_checkout = await gitCommandManager.GitCheckout(executionContext, targetPath, sourcesToBuild, cancellationToken); + if (exitCode_checkout != 0) + { + // local repository is shallow repository, checkout may fail due to lack of commits history. + // this will happen when the checkout commit is older than tip -> fetchDepth + if (fetchDepth > 0) + { + executionContext.Warning($"Git checkout failed on shallow repository, this might because of git fetch with depth '{fetchDepth}' doesn't include the checkout commit '{sourcesToBuild}'."); + } + + throw new InvalidOperationException($"Git checkout failed with exit code: {exitCode_checkout}"); + } + + // Submodule update + if (checkoutSubmodules) + { + cancellationToken.ThrowIfCancellationRequested(); + + int exitCode_submoduleSync = await gitCommandManager.GitSubmoduleSync(executionContext, targetPath, checkoutNestedSubmodules, cancellationToken); + if (exitCode_submoduleSync != 0) + { + throw new InvalidOperationException($"Git submodule sync failed with exit code: {exitCode_submoduleSync}"); + } + + List additionalSubmoduleUpdateArgs = new List(); + + // Prepare proxy config for submodule update. + if (runnerProxy != null && !string.IsNullOrEmpty(runnerProxy.ProxyAddress) && !runnerProxy.WebProxy.IsBypassed(repositoryUrl)) + { + executionContext.Debug($"Config proxy server '{runnerProxy.ProxyAddress}' for git submodule update."); + ArgUtil.NotNullOrEmpty(proxyUrlWithCredString, nameof(proxyUrlWithCredString)); + additionalSubmoduleUpdateArgs.Add($"-c http.proxy=\"{proxyUrlWithCredString}\""); + } + + // Prepare ignore ssl cert error config for fetch. + if (acceptUntrustedCerts) + { + additionalSubmoduleUpdateArgs.Add($"-c http.sslVerify=false"); + } + + // Prepare self-signed CA cert config for submodule update. 
+ if (useSelfSignedCACert) + { + executionContext.Debug($"Use self-signed CA certificate '{runnerCert.CACertificateFile}' for git submodule update."); + string authorityUrl = repositoryUrl.AbsoluteUri.Replace(repositoryUrl.PathAndQuery, string.Empty); + additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.sslcainfo=\"{runnerCert.CACertificateFile}\""); + } + + // Prepare client cert config for submodule update. + if (useClientCert) + { + executionContext.Debug($"Use client certificate '{runnerCert.ClientCertificateFile}' for git submodule update."); + string authorityUrl = repositoryUrl.AbsoluteUri.Replace(repositoryUrl.PathAndQuery, string.Empty); + + if (!string.IsNullOrEmpty(clientCertPrivateKeyAskPassFile)) + { + additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.{authorityUrl}.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\" -c http.{authorityUrl}.sslCertPasswordProtected=true -c core.askpass=\"{clientCertPrivateKeyAskPassFile}\""); + } + else + { + additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.{authorityUrl}.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\""); + } + } +#if OS_WINDOWS + if (schannelSslBackend) + { + executionContext.Debug("Use SChannel SslBackend for git submodule update."); + additionalSubmoduleUpdateArgs.Add("-c http.sslbackend=\"schannel\""); + } +#endif + + int exitCode_submoduleUpdate = await gitCommandManager.GitSubmoduleUpdate(executionContext, targetPath, fetchDepth, string.Join(" ", additionalSubmoduleUpdateArgs), checkoutNestedSubmodules, cancellationToken); + if (exitCode_submoduleUpdate != 0) + { + throw new InvalidOperationException($"Git submodule update failed with exit code: {exitCode_submoduleUpdate}"); + } + } + + if (useClientCert && !string.IsNullOrEmpty(clientCertPrivateKeyAskPassFile)) + { + executionContext.Debug("Remove git.sslkey askpass file."); + IOUtil.DeleteFile(clientCertPrivateKeyAskPassFile); + } + + // Set intra-task variable for post job cleanup + executionContext.SetIntraActionState("repositoryPath", targetPath); + executionContext.SetIntraActionState("modifiedgitconfig", JsonUtility.ToString(configModifications.Keys)); + foreach (var config in configModifications) + { + executionContext.SetIntraActionState(config.Key, config.Value); + } + } + + public async Task CleanupAsync(RunnerActionPluginExecutionContext executionContext) + { + ArgUtil.NotNull(executionContext, nameof(executionContext)); + var repositoryPath = Environment.GetEnvironmentVariable("STATE_repositoryPath"); + ArgUtil.NotNullOrEmpty(repositoryPath, nameof(repositoryPath)); + executionContext.Output($"Cleanup cached git credential from {repositoryPath}."); + + // Initialize git command manager + GitCliManager gitCommandManager = new GitCliManager(); + await gitCommandManager.LoadGitExecutionInfo(executionContext); + + executionContext.Debug("Remove any extraheader and proxy setting from git config."); + var configKeys = JsonUtility.FromString>(Environment.GetEnvironmentVariable("STATE_modifiedgitconfig")); + if (configKeys?.Count > 0) + { + foreach (var config in configKeys) + { + var configValue = Environment.GetEnvironmentVariable($"STATE_{config}"); + if (!string.IsNullOrEmpty(configValue)) + { + await RemoveGitConfig(executionContext, gitCommandManager, repositoryPath, config, configValue); + } + } + } + } + + private void RequirementCheck(RunnerActionPluginExecutionContext executionContext, GitCliManager 
gitCommandManager, bool checkGitLfs)
+        {
+            // Git v2.9 or newer is required to use an auth header.
+            gitCommandManager.EnsureGitVersion(_minGitVersionSupportAuthHeader, throwOnNotMatch: true);
+
+#if OS_WINDOWS
+            // Check the git version for the SChannel SSLBackend (Windows only).
+            bool schannelSslBackend = StringUtil.ConvertToBoolean(executionContext.GetRunnerContext("gituseschannel"));
+            if (schannelSslBackend)
+            {
+                gitCommandManager.EnsureGitVersion(_minGitVersionSupportSSLBackendOverride, throwOnNotMatch: true);
+            }
+#endif
+            if (checkGitLfs)
+            {
+                // Git-lfs v2.1 or newer is required to use an auth header.
+                gitCommandManager.EnsureGitLFSVersion(_minGitLfsVersionSupportAuthHeader, throwOnNotMatch: true);
+            }
+        }
+
+        private string GenerateBasicAuthHeader(RunnerActionPluginExecutionContext executionContext, string accessToken)
+        {
+            // Use a basic auth header with username:password in base64 encoding.
+            string authHeader = $"x-access-token:{accessToken}";
+            string base64encodedAuthHeader = Convert.ToBase64String(Encoding.UTF8.GetBytes(authHeader));
+
+            // Add the base64-encoded auth header to the secret masker.
+            executionContext.AddMask(base64encodedAuthHeader);
+            return $"basic {base64encodedAuthHeader}";
+        }
+
+        private async Task<bool> IsRepositoryOriginUrlMatch(RunnerActionPluginExecutionContext context, GitCliManager gitCommandManager, string repositoryPath, Uri expectedRepositoryOriginUrl)
+        {
+            context.Debug($"Checking if the repo on {repositoryPath} matches the expected repository origin URL. expected Url: {expectedRepositoryOriginUrl.AbsoluteUri}");
+            if (!Directory.Exists(Path.Combine(repositoryPath, ".git")))
+            {
+                // There is no repo directory.
+                context.Debug($"Repository is not found since the '.git' directory does not exist under '{repositoryPath}'.");
+                return false;
+            }
+
+            Uri remoteUrl = await gitCommandManager.GitGetFetchUrl(context, repositoryPath);
+            if (remoteUrl == null)
+            {
+                // The origin fetch url was not found.
+                context.Debug("Repository remote origin fetch url is empty.");
+                return false;
+            }
+
+            context.Debug($"Repository remote origin fetch url is {remoteUrl}");
+            // Compare the url passed in with the remote url found.
+            if (expectedRepositoryOriginUrl.Equals(remoteUrl))
+            {
+                context.Debug("URLs match.");
+                return true;
+            }
+            else
+            {
+                context.Debug($"The remote.origin.url of the repository under root folder '{repositoryPath}' doesn't match the source repository url.");
+                return false;
+            }
+        }
+
+        private async Task RemoveGitConfig(RunnerActionPluginExecutionContext executionContext, GitCliManager gitCommandManager, string targetPath, string configKey, string configValue)
+        {
+            int exitCode_configUnset = await gitCommandManager.GitConfigUnset(executionContext, targetPath, configKey);
+            if (exitCode_configUnset != 0)
+            {
+                // If we are unable to use git.exe to unset http.extraheader, http.proxy, or core.askpass, modify the git config file on disk. Make sure we don't leave the credential behind.
+                if (!string.IsNullOrEmpty(configValue))
+                {
+                    executionContext.Warning("An unsuccessful attempt was made using git command line to remove \"http.extraheader\" from the git config. Attempting to modify the git config file directly to remove the credential.");
+                    string gitConfig = Path.Combine(targetPath, ".git/config");
+                    if (File.Exists(gitConfig))
+                    {
+                        List<string> safeGitConfig = new List<string>();
+                        var gitConfigContents = File.ReadAllLines(gitConfig);
+                        foreach (var line in gitConfigContents)
+                        {
+                            if (!line.Contains(configValue))
+                            {
+                                safeGitConfig.Add(line);
+                            }
+                        }
+
+                        File.WriteAllLines(gitConfig, safeGitConfig);
+                    }
+                }
+                else
+                {
+                    executionContext.Warning($"Unable to remove \"{configKey}\" from the git config. To remove the credential, execute \"git config --unset-all {configKey}\" from the repository root \"{targetPath}\".");
+                }
+            }
+        }
+
+        private bool IsPullRequest(string sourceBranch)
+        {
+            return !string.IsNullOrEmpty(sourceBranch) &&
+                (sourceBranch.StartsWith(_pullRefsPrefix, StringComparison.OrdinalIgnoreCase) ||
+                 sourceBranch.StartsWith(_remotePullRefsPrefix, StringComparison.OrdinalIgnoreCase));
+        }
+
+        private string GetRemoteRefName(string refName)
+        {
+            if (string.IsNullOrEmpty(refName))
+            {
+                // If the refName is empty, return the remote name for master.
+                refName = _remoteRefsPrefix + "master";
+            }
+            else if (refName.Equals("master", StringComparison.OrdinalIgnoreCase))
+            {
+                // If the refName is master, return the remote name for master.
+                refName = _remoteRefsPrefix + refName;
+            }
+            else if (refName.StartsWith(_refsPrefix, StringComparison.OrdinalIgnoreCase))
+            {
+                // If the refName starts with refs/heads, change it to the remote version of the name.
+                refName = _remoteRefsPrefix + refName.Substring(_refsPrefix.Length);
+            }
+            else if (refName.StartsWith(_pullRefsPrefix, StringComparison.OrdinalIgnoreCase))
+            {
+                // If the refName starts with refs/pull, change it to the remote version of the name.
+                refName = refName.Replace(_pullRefsPrefix, _remotePullRefsPrefix);
+            }
+
+            return refName;
+        }
+    }
+}
diff --git a/src/Runner.Plugins/Repository/v1.1/RepositoryPlugin.cs b/src/Runner.Plugins/Repository/v1.1/RepositoryPlugin.cs
new file mode 100644
index 00000000000..3d6c87422a5
--- /dev/null
+++ b/src/Runner.Plugins/Repository/v1.1/RepositoryPlugin.cs
@@ -0,0 +1,180 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+using GitHub.Runner.Sdk;
+using Pipelines = GitHub.DistributedTask.Pipelines;
+using System.IO;
+using GitHub.DistributedTask.Pipelines.ContextData;
+using System.Text.RegularExpressions;
+using GitHub.DistributedTask.Pipelines.Expressions;
+using System.Text;
+
+namespace GitHub.Runner.Plugins.Repository.v1_1
+{
+    public class CheckoutTask : IRunnerActionPlugin
+    {
+        public async Task RunAsync(RunnerActionPluginExecutionContext executionContext, CancellationToken token)
+        {
+            string runnerWorkspace = executionContext.GetRunnerContext("workspace");
+            ArgUtil.Directory(runnerWorkspace, nameof(runnerWorkspace));
+            string tempDirectory = executionContext.GetRunnerContext("temp");
+            ArgUtil.Directory(tempDirectory, nameof(tempDirectory));
+
+            var repoFullName = executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Repository);
+            if (string.IsNullOrEmpty(repoFullName))
+            {
+                repoFullName = executionContext.GetGitHubContext("repository");
+            }
+
+            var repoFullNameSplit = repoFullName.Split("/", StringSplitOptions.RemoveEmptyEntries);
+            if (repoFullNameSplit.Length != 2)
+            {
+                throw new ArgumentOutOfRangeException(repoFullName);
+            }
+
+            string expectRepoPath;
+            var path = executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Path);
+            if (!string.IsNullOrEmpty(path))
+            {
+                expectRepoPath = IOUtil.ResolvePath(runnerWorkspace, path);
+                if (!expectRepoPath.StartsWith(runnerWorkspace.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar) + Path.DirectorySeparatorChar))
+                {
+                    throw new ArgumentException($"Input path '{path}' should resolve to a directory under '{runnerWorkspace}', current resolved path '{expectRepoPath}'.");
+                }
+            }
+            else
+            {
+                // When the repository doesn't have a path set, default to '<runnerWorkspace>/<repoName>'.
+                expectRepoPath = Path.Combine(runnerWorkspace, repoFullNameSplit[1]);
+            }
+
+            var workspaceRepo = executionContext.GetGitHubContext("repository");
+            // For the self repository, we need to let the worker know where it is after checkout.
+            if (string.Equals(workspaceRepo, repoFullName, StringComparison.OrdinalIgnoreCase))
+            {
+                var workspaceRepoPath = executionContext.GetGitHubContext("workspace");
+
+                executionContext.Debug($"Repository needs to be placed at '{expectRepoPath}', current location is '{workspaceRepoPath}'");
+                if (!string.Equals(workspaceRepoPath.Trim(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar), expectRepoPath.Trim(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar), IOUtil.FilePathStringComparison))
+                {
+                    executionContext.Output($"Repository is currently at '{workspaceRepoPath}', moving to '{expectRepoPath}'.");
+                    var count = 1;
+                    var staging = Path.Combine(tempDirectory, $"_{count}");
+                    while (Directory.Exists(staging))
+                    {
+                        count++;
+                        staging = Path.Combine(tempDirectory, $"_{count}");
+                    }
+
+                    try
+                    {
+                        executionContext.Debug($"Move existing repository '{workspaceRepoPath}' to '{expectRepoPath}' via staging directory '{staging}'.");
+                        IOUtil.MoveDirectory(workspaceRepoPath, expectRepoPath, staging, CancellationToken.None);
+                    }
+                    catch (Exception ex)
+                    {
+                        executionContext.Debug("Caught exception during repository move.");
+                        executionContext.Debug(ex.ToString());
+                        executionContext.Warning("Unable to move and reuse the existing repository at the required location.");
+                        IOUtil.DeleteDirectory(expectRepoPath, CancellationToken.None);
+                    }
+
+                    executionContext.Output($"Repository will be located at '{expectRepoPath}'.");
+                }
+
+                executionContext.Debug($"Update workspace repository location.");
+                executionContext.SetRepositoryPath(repoFullName, expectRepoPath, true);
+            }
+
+            string sourceBranch;
+            string sourceVersion;
+            string refInput = executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Ref);
+            if (string.IsNullOrEmpty(refInput))
+            {
+                sourceBranch = executionContext.GetGitHubContext("ref");
+                sourceVersion = executionContext.GetGitHubContext("sha");
+            }
+            else
+            {
+                sourceBranch = refInput;
+                sourceVersion = executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Version); // 'version' will be removed when checkout moves to the repository in the graph
+                if (string.IsNullOrEmpty(sourceVersion) && RegexUtility.IsMatch(sourceBranch, WellKnownRegularExpressions.SHA1))
+                {
+                    sourceVersion = sourceBranch;
+                    // If Ref is a SHA and the repo is self, we need to use github.ref as the source branch since it might be refs/pull/*.
+                    if (string.Equals(workspaceRepo, repoFullName, StringComparison.OrdinalIgnoreCase))
+                    {
+                        sourceBranch = executionContext.GetGitHubContext("ref");
+                    }
+                    else
+                    {
+                        sourceBranch = "refs/heads/master";
+                    }
+                }
+            }
+
+            bool clean = StringUtil.ConvertToBoolean(executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Clean), true);
+            string submoduleInput =
executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Submodules); + + int fetchDepth = 0; + if (!int.TryParse(executionContext.GetInput("fetch-depth"), out fetchDepth) || fetchDepth < 0) + { + fetchDepth = 0; + } + + bool gitLfsSupport = StringUtil.ConvertToBoolean(executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Lfs)); + string accessToken = executionContext.GetInput(Pipelines.PipelineConstants.CheckoutTaskInputs.Token); + if (string.IsNullOrEmpty(accessToken)) + { + accessToken = executionContext.GetGitHubContext("token"); + } + + // register problem matcher + string matcherFile = Path.Combine(tempDirectory, $"git_{Guid.NewGuid()}.json"); + File.WriteAllText(matcherFile, GitHubSourceProvider.ProblemMatcher, new UTF8Encoding(false)); + executionContext.Output($"##[add-matcher]{matcherFile}"); + try + { + await new GitHubSourceProvider().GetSourceAsync(executionContext, + expectRepoPath, + repoFullName, + sourceBranch, + sourceVersion, + clean, + submoduleInput, + fetchDepth, + gitLfsSupport, + accessToken, + token); + } + finally + { + executionContext.Output("##[remove-matcher owner=checkout-git]"); + } + } + } + + public class CleanupTask : IRunnerActionPlugin + { + public async Task RunAsync(RunnerActionPluginExecutionContext executionContext, CancellationToken token) + { + string tempDirectory = executionContext.GetRunnerContext("temp"); + ArgUtil.Directory(tempDirectory, nameof(tempDirectory)); + + // register problem matcher + string matcherFile = Path.Combine(tempDirectory, $"git_{Guid.NewGuid()}.json"); + File.WriteAllText(matcherFile, GitHubSourceProvider.ProblemMatcher, new UTF8Encoding(false)); + executionContext.Output($"##[add-matcher]{matcherFile}"); + try + { + await new GitHubSourceProvider().CleanupAsync(executionContext); + } + finally + { + executionContext.Output("##[remove-matcher owner=checkout-git]"); + } + } + } +} diff --git a/src/Runner.Plugins/Runner.Plugins.csproj b/src/Runner.Plugins/Runner.Plugins.csproj new file mode 100644 index 00000000000..495bc036dc7 --- /dev/null +++ b/src/Runner.Plugins/Runner.Plugins.csproj @@ -0,0 +1,60 @@ + + + + netcoreapp2.2 + Library + win-x64;win-x86;linux-x64;linux-arm;rhel.6-x64;osx-x64 + true + portable-net45+win8 + NU1701;NU1603 + $(Version) + + + + + + + + + portable + + + + OS_WINDOWS;X64;TRACE + + + OS_WINDOWS;X86;TRACE + + + OS_WINDOWS;X64;DEBUG;TRACE + + + OS_WINDOWS;X86;DEBUG;TRACE + + + + OS_OSX;X64;TRACE + + + OS_OSX;DEBUG;X64;TRACE + + + + OS_LINUX;X64;TRACE + + + OS_LINUX;OS_RHEL6;X64;TRACE + + + OS_LINUX;ARM;TRACE + + + OS_LINUX;X64;DEBUG;TRACE + + + OS_LINUX;OS_RHEL6;X64;DEBUG;TRACE + + + OS_LINUX;ARM;DEBUG;TRACE + + diff --git a/src/Runner.Sdk/ActionPlugin.cs b/src/Runner.Sdk/ActionPlugin.cs new file mode 100644 index 00000000000..5f536e74cc9 --- /dev/null +++ b/src/Runner.Sdk/ActionPlugin.cs @@ -0,0 +1,314 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Runtime.InteropServices; +using System.Threading; +using System.Threading.Tasks; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Pipelines = GitHub.DistributedTask.Pipelines; + +namespace GitHub.Runner.Sdk +{ + public interface IRunnerActionPlugin + { + Task RunAsync(RunnerActionPluginExecutionContext executionContext, CancellationToken token); + } + + public class 
RunnerActionPluginExecutionContext : ITraceWriter + { + private readonly string DebugEnvironmentalVariable = "ACTIONS_STEP_DEBUG"; + private VssConnection _connection; + private readonly object _stdoutLock = new object(); + private readonly ITraceWriter _trace; // for unit tests + + public RunnerActionPluginExecutionContext() + : this(null) + { } + + public RunnerActionPluginExecutionContext(ITraceWriter trace) + { + _trace = trace; + this.Endpoints = new List(); + this.Inputs = new Dictionary(StringComparer.OrdinalIgnoreCase); + this.Variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + public List Endpoints { get; set; } + public Dictionary Variables { get; set; } + public Dictionary Inputs { get; set; } + public DictionaryContextData Context { get; set; } = new DictionaryContextData(); + + [JsonIgnore] + public VssConnection VssConnection + { + get + { + if (_connection == null) + { + _connection = InitializeVssConnection(); + } + return _connection; + } + } + + public VssConnection InitializeVssConnection() + { + var headerValues = new List(); + headerValues.Add(new ProductInfoHeaderValue($"GitHubActionsRunner-Plugin", BuildConstants.RunnerPackage.Version)); + headerValues.Add(new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})")); + + if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0) + { + headerValues.AddRange(VssClientHttpRequestSettings.Default.UserAgent); + } + + VssClientHttpRequestSettings.Default.UserAgent = headerValues; + +#if OS_LINUX || OS_OSX + // The .NET Core 2.1 runtime switched its HTTP default from HTTP 1.1 to HTTP 2. + // This causes problems with some versions of the Curl handler. + // See GitHub issue https://github.com/dotnet/corefx/issues/32376 + VssClientHttpRequestSettings.Default.UseHttp11 = true; +#endif + + var certSetting = GetCertConfiguration(); + if (certSetting != null) + { + if (!string.IsNullOrEmpty(certSetting.ClientCertificateArchiveFile)) + { + VssClientHttpRequestSettings.Default.ClientCertificateManager = new RunnerClientCertificateManager(certSetting.ClientCertificateArchiveFile, certSetting.ClientCertificatePassword); + } + + if (certSetting.SkipServerCertificateValidation) + { + VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator; + } + } + + var proxySetting = GetProxyConfiguration(); + if (proxySetting != null) + { + if (!string.IsNullOrEmpty(proxySetting.ProxyAddress)) + { + VssHttpMessageHandler.DefaultWebProxy = new RunnerWebProxyCore(proxySetting.ProxyAddress, proxySetting.ProxyUsername, proxySetting.ProxyPassword, proxySetting.ProxyBypassList); + } + } + + ServiceEndpoint systemConnection = this.Endpoints.FirstOrDefault(e => string.Equals(e.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); + ArgUtil.NotNull(systemConnection, nameof(systemConnection)); + ArgUtil.NotNull(systemConnection.Url, nameof(systemConnection.Url)); + + VssCredentials credentials = VssUtil.GetVssCredential(systemConnection); + ArgUtil.NotNull(credentials, nameof(credentials)); + return VssUtil.CreateConnection(systemConnection.Url, credentials); + } + + public string GetInput(string name, bool required = false) + { + string value = null; + if (this.Inputs.ContainsKey(name)) + { + value = this.Inputs[name]; + } + + Debug($"Input '{name}': '{value ?? 
string.Empty}'"); + + if (string.IsNullOrEmpty(value) && required) + { + throw new ArgumentNullException(name); + } + + return value; + } + + public void Info(string message) + { + Debug(message); + } + + public void Verbose(string message) + { + Debug(message); + } + + public void Error(string message) + { + Output($"##[error]{Escape(message)}"); + } + + public void Debug(string message) + { + var debugString = Variables.GetValueOrDefault(DebugEnvironmentalVariable)?.Value; + if (StringUtil.ConvertToBoolean(debugString)) + { + var multilines = message?.Replace("\r\n", "\n")?.Split("\n"); + if (multilines != null) + { + foreach (var line in multilines) + { + Output($"##[debug]{Escape(line)}"); + } + } + } + } + + public void Warning(string message) + { + Output($"##[warning]{Escape(message)}"); + } + + public void Output(string message) + { + lock (_stdoutLock) + { + if (_trace == null) + { + Console.WriteLine(message); + } + else + { + _trace.Info(message); + } + } + } + + public void AddMask(string secret) + { + Output($"##[add-mask]{Escape(secret)}"); + } + + public void Command(string command) + { + Output($"##[command]{Escape(command)}"); + } + + public void SetRepositoryPath(string repoName, string path, bool workspaceRepo) + { + Output($"##[internal-set-repo-path repoFullName={repoName};workspaceRepo={workspaceRepo.ToString()}]{path}"); + } + + public void SetIntraActionState(string name, string value) + { + Output($"##[save-state name={Escape(name)}]{Escape(value)}"); + } + + public String GetRunnerContext(string contextName) + { + this.Context.TryGetValue("runner", out var context); + var runnerContext = context as DictionaryContextData; + ArgUtil.NotNull(runnerContext, nameof(runnerContext)); + if (runnerContext.TryGetValue(contextName, out var data)) + { + return data as StringContextData; + } + else + { + return null; + } + } + + public String GetGitHubContext(string contextName) + { + this.Context.TryGetValue("github", out var context); + var githubContext = context as DictionaryContextData; + ArgUtil.NotNull(githubContext, nameof(githubContext)); + if (githubContext.TryGetValue(contextName, out var data)) + { + return data as StringContextData; + } + else + { + return null; + } + } + + public RunnerCertificateSettings GetCertConfiguration() + { + bool skipCertValidation = StringUtil.ConvertToBoolean(GetRunnerContext("SkipCertValidation")); + string caFile = GetRunnerContext("CAInfo"); + string clientCertFile = GetRunnerContext("ClientCert"); + + if (!string.IsNullOrEmpty(caFile) || !string.IsNullOrEmpty(clientCertFile) || skipCertValidation) + { + var certConfig = new RunnerCertificateSettings(); + certConfig.SkipServerCertificateValidation = skipCertValidation; + certConfig.CACertificateFile = caFile; + + if (!string.IsNullOrEmpty(clientCertFile)) + { + certConfig.ClientCertificateFile = clientCertFile; + string clientCertKey = GetRunnerContext("ClientCertKey"); + string clientCertArchive = GetRunnerContext("ClientCertArchive"); + string clientCertPassword = GetRunnerContext("ClientCertPassword"); + + certConfig.ClientCertificatePrivateKeyFile = clientCertKey; + certConfig.ClientCertificateArchiveFile = clientCertArchive; + certConfig.ClientCertificatePassword = clientCertPassword; + + certConfig.VssClientCertificateManager = new RunnerClientCertificateManager(clientCertArchive, clientCertPassword); + } + + return certConfig; + } + else + { + return null; + } + } + + public RunnerWebProxySettings GetProxyConfiguration() + { + string proxyUrl = GetRunnerContext("ProxyUrl"); + 
if (!string.IsNullOrEmpty(proxyUrl)) + { + string proxyUsername = GetRunnerContext("ProxyUsername"); + string proxyPassword = GetRunnerContext("ProxyPassword"); + List proxyBypassHosts = StringUtil.ConvertFromJson>(GetRunnerContext("ProxyBypassList") ?? "[]"); + return new RunnerWebProxySettings() + { + ProxyAddress = proxyUrl, + ProxyUsername = proxyUsername, + ProxyPassword = proxyPassword, + ProxyBypassList = proxyBypassHosts, + WebProxy = new RunnerWebProxyCore(proxyUrl, proxyUsername, proxyPassword, proxyBypassHosts) + }; + } + else + { + return null; + } + } + + private string Escape(string input) + { + foreach (var mapping in _commandEscapeMappings) + { + input = input.Replace(mapping.Key, mapping.Value); + } + + return input; + } + + private Dictionary _commandEscapeMappings = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + { + ";", "%3B" + }, + { + "\r", "%0D" + }, + { + "\n", "%0A" + }, + { + "]", "%5D" + }, + }; + } +} diff --git a/src/Runner.Sdk/ITraceWriter.cs b/src/Runner.Sdk/ITraceWriter.cs new file mode 100644 index 00000000000..16ff372f72b --- /dev/null +++ b/src/Runner.Sdk/ITraceWriter.cs @@ -0,0 +1,8 @@ +namespace GitHub.Runner.Sdk +{ + public interface ITraceWriter + { + void Info(string message); + void Verbose(string message); + } +} diff --git a/src/Runner.Sdk/ProcessInvoker.cs b/src/Runner.Sdk/ProcessInvoker.cs new file mode 100644 index 00000000000..5841469144e --- /dev/null +++ b/src/Runner.Sdk/ProcessInvoker.cs @@ -0,0 +1,892 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; +using System.Text; +using System.Threading; +using System.Threading.Channels; +using System.Threading.Tasks; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Sdk +{ + + // The implementation of the process invoker does not hook up DataReceivedEvent and ErrorReceivedEvent of Process, + // instead, we read both STDOUT and STDERR stream manually on separate thread. + // The reason is we find a huge perf issue about process STDOUT/STDERR with those events. 
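+    // A minimal usage sketch (the trace/workDir/env names below are illustrative, not part of this file):
+    //
+    //   using (var invoker = new ProcessInvoker(trace))
+    //   {
+    //       invoker.OutputDataReceived += (sender, e) => trace.Info(e.Data);
+    //       invoker.ErrorDataReceived += (sender, e) => trace.Info(e.Data);
+    //       int exitCode = await invoker.ExecuteAsync(workDir, "git", "version", env, CancellationToken.None);
+    //   }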
+ public sealed class ProcessInvoker : IDisposable + { + private Process _proc; + private Stopwatch _stopWatch; + private int _asyncStreamReaderCount = 0; + private bool _waitingOnStreams = false; + private readonly AsyncManualResetEvent _outputProcessEvent = new AsyncManualResetEvent(); + private readonly TaskCompletionSource _processExitedCompletionSource = new TaskCompletionSource(); + private readonly CancellationTokenSource _processStandardInWriteCancellationTokenSource = new CancellationTokenSource(); + private readonly ConcurrentQueue _errorData = new ConcurrentQueue(); + private readonly ConcurrentQueue _outputData = new ConcurrentQueue(); + private readonly TimeSpan _sigintTimeout = TimeSpan.FromMilliseconds(7500); + private readonly TimeSpan _sigtermTimeout = TimeSpan.FromMilliseconds(2500); + private ITraceWriter Trace { get; set; } + + private class AsyncManualResetEvent + { + private volatile TaskCompletionSource m_tcs = new TaskCompletionSource(); + + public Task WaitAsync() { return m_tcs.Task; } + + public void Set() + { + var tcs = m_tcs; + Task.Factory.StartNew(s => ((TaskCompletionSource)s).TrySetResult(true), + tcs, CancellationToken.None, TaskCreationOptions.PreferFairness, TaskScheduler.Default); + tcs.Task.Wait(); + } + + public void Reset() + { + while (true) + { + var tcs = m_tcs; + if (!tcs.Task.IsCompleted || + Interlocked.CompareExchange(ref m_tcs, new TaskCompletionSource(), tcs) == tcs) + return; + } + } + } + + public event EventHandler OutputDataReceived; + public event EventHandler ErrorDataReceived; + + public ProcessInvoker(ITraceWriter trace) + { + this.Trace = trace; + } + + public Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: false, + cancellationToken: cancellationToken); + } + + public Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: null, + cancellationToken: cancellationToken); + } + + public Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: outputEncoding, + killProcessOnCancel: false, + cancellationToken: cancellationToken); + } + + public Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: outputEncoding, + killProcessOnCancel: killProcessOnCancel, + redirectStandardIn: null, + cancellationToken: cancellationToken); + } + + public 
Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + Channel redirectStandardIn, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: outputEncoding, + killProcessOnCancel: killProcessOnCancel, + redirectStandardIn: redirectStandardIn, + inheritConsoleHandler: false, + cancellationToken: cancellationToken); + } + + public Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + Channel redirectStandardIn, + bool inheritConsoleHandler, + CancellationToken cancellationToken) + { + return ExecuteAsync( + workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: outputEncoding, + killProcessOnCancel: killProcessOnCancel, + redirectStandardIn: redirectStandardIn, + inheritConsoleHandler: inheritConsoleHandler, + keepStandardInOpen: false, + highPriorityProcess: false, + cancellationToken: cancellationToken); + } + + public async Task ExecuteAsync( + string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + Channel redirectStandardIn, + bool inheritConsoleHandler, + bool keepStandardInOpen, + bool highPriorityProcess, + CancellationToken cancellationToken) + { + ArgUtil.Null(_proc, nameof(_proc)); + ArgUtil.NotNullOrEmpty(fileName, nameof(fileName)); + + Trace.Info("Starting process:"); + Trace.Info($" File name: '{fileName}'"); + Trace.Info($" Arguments: '{arguments}'"); + Trace.Info($" Working directory: '{workingDirectory}'"); + Trace.Info($" Require exit code zero: '{requireExitCodeZero}'"); + Trace.Info($" Encoding web name: {outputEncoding?.WebName} ; code page: '{outputEncoding?.CodePage}'"); + Trace.Info($" Force kill process on cancellation: '{killProcessOnCancel}'"); + Trace.Info($" Redirected STDIN: '{redirectStandardIn != null}'"); + Trace.Info($" Persist current code page: '{inheritConsoleHandler}'"); + Trace.Info($" Keep redirected STDIN open: '{keepStandardInOpen}'"); + Trace.Info($" High priority process: '{highPriorityProcess}'"); + + _proc = new Process(); + _proc.StartInfo.FileName = fileName; + _proc.StartInfo.Arguments = arguments; + _proc.StartInfo.WorkingDirectory = workingDirectory; + _proc.StartInfo.UseShellExecute = false; + _proc.StartInfo.CreateNoWindow = !inheritConsoleHandler; + _proc.StartInfo.RedirectStandardInput = true; + _proc.StartInfo.RedirectStandardError = true; + _proc.StartInfo.RedirectStandardOutput = true; + + // Ensure we process STDERR even the process exit event happen before we start read STDERR stream. + if (_proc.StartInfo.RedirectStandardError) + { + Interlocked.Increment(ref _asyncStreamReaderCount); + } + + // Ensure we process STDOUT even the process exit event happen before we start read STDOUT stream. 
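+            // (Each counter is decremented in StartReadStream once the corresponding stream reaches end-of-stream;
+            // ProcessExitedHandler defers completing _processExitedCompletionSource until both readers have drained
+            // or a five-second grace period elapses, so ExecuteAsync does not return with output still buffered.)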
+ if (_proc.StartInfo.RedirectStandardOutput) + { + Interlocked.Increment(ref _asyncStreamReaderCount); + } + +#if OS_WINDOWS + // If StandardErrorEncoding or StandardOutputEncoding is not specified the on the + // ProcessStartInfo object, then .NET PInvokes to resolve the default console output + // code page: + // [DllImport("api-ms-win-core-console-l1-1-0.dll", SetLastError = true)] + // public extern static uint GetConsoleOutputCP(); + StringUtil.EnsureRegisterEncodings(); +#endif + if (outputEncoding != null) + { + _proc.StartInfo.StandardErrorEncoding = outputEncoding; + _proc.StartInfo.StandardOutputEncoding = outputEncoding; + } + + // Copy the environment variables. + if (environment != null && environment.Count > 0) + { + foreach (KeyValuePair kvp in environment) + { + _proc.StartInfo.Environment[kvp.Key] = kvp.Value; + } + } + + // Indicate GitHub Actions process. + _proc.StartInfo.Environment["GITHUB_ACTIONS"] = "true"; + + // Hook up the events. + _proc.EnableRaisingEvents = true; + _proc.Exited += ProcessExitedHandler; + + // Start the process. + _stopWatch = Stopwatch.StartNew(); + _proc.Start(); + + // Decrease invoked process priority, in platform specifc way, relative to parent + if (!highPriorityProcess) + { + DecreaseProcessPriority(_proc); + } + + // Start the standard error notifications, if appropriate. + if (_proc.StartInfo.RedirectStandardError) + { + StartReadStream(_proc.StandardError, _errorData); + } + + // Start the standard output notifications, if appropriate. + if (_proc.StartInfo.RedirectStandardOutput) + { + StartReadStream(_proc.StandardOutput, _outputData); + } + + if (_proc.StartInfo.RedirectStandardInput) + { + if (redirectStandardIn != null) + { + StartWriteStream(redirectStandardIn, _proc.StandardInput, keepStandardInOpen); + } + else + { + // Close the input stream. This is done to prevent commands from blocking the build waiting for input from the user. + _proc.StandardInput.Close(); + } + } + + using (var registration = cancellationToken.Register(async () => await CancelAndKillProcessTree(killProcessOnCancel))) + { + Trace.Info($"Process started with process id {_proc.Id}, waiting for process exit."); + while (true) + { + Task outputSignal = _outputProcessEvent.WaitAsync(); + var signaled = await Task.WhenAny(outputSignal, _processExitedCompletionSource.Task); + + if (signaled == outputSignal) + { + ProcessOutput(); + } + else + { + _stopWatch.Stop(); + break; + } + } + + // Just in case there was some pending output when the process shut down go ahead and check the + // data buffers one last time before returning + ProcessOutput(); + + Trace.Info($"Finished process {_proc.Id} with exit code {_proc.ExitCode}, and elapsed time {_stopWatch.Elapsed}."); + } + + cancellationToken.ThrowIfCancellationRequested(); + + // Wait for process to finish. 
+ if (_proc.ExitCode != 0 && requireExitCodeZero) + { + throw new ProcessExitCodeException(exitCode: _proc.ExitCode, fileName: fileName, arguments: arguments); + } + + return _proc.ExitCode; + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + if (_proc != null) + { + _proc.Dispose(); + _proc = null; + } + } + } + + private void ProcessOutput() + { + List errorData = new List(); + List outputData = new List(); + + string errorLine; + while (_errorData.TryDequeue(out errorLine)) + { + errorData.Add(errorLine); + } + + string outputLine; + while (_outputData.TryDequeue(out outputLine)) + { + outputData.Add(outputLine); + } + + _outputProcessEvent.Reset(); + + // Write the error lines. + if (errorData != null && this.ErrorDataReceived != null) + { + foreach (string line in errorData) + { + if (line != null) + { + this.ErrorDataReceived(this, new ProcessDataReceivedEventArgs(line)); + } + } + } + + // Process the output lines. + if (outputData != null && this.OutputDataReceived != null) + { + foreach (string line in outputData) + { + if (line != null) + { + // The line is output from the process that was invoked. + this.OutputDataReceived(this, new ProcessDataReceivedEventArgs(line)); + } + } + } + } + + private async Task CancelAndKillProcessTree(bool killProcessOnCancel) + { + ArgUtil.NotNull(_proc, nameof(_proc)); + if (!killProcessOnCancel) + { + bool sigint_succeed = await SendSIGINT(_sigintTimeout); + if (sigint_succeed) + { + Trace.Info("Process cancelled successfully through Ctrl+C/SIGINT."); + return; + } + + bool sigterm_succeed = await SendSIGTERM(_sigtermTimeout); + if (sigterm_succeed) + { + Trace.Info("Process terminate successfully through Ctrl+Break/SIGTERM."); + return; + } + } + + Trace.Info("Kill entire process tree since both cancel and terminate signal has been ignored by the target process."); + KillProcessTree(); + } + + private async Task SendSIGINT(TimeSpan timeout) + { +#if OS_WINDOWS + return await SendCtrlSignal(ConsoleCtrlEvent.CTRL_C, timeout); +#else + return await SendSignal(Signals.SIGINT, timeout); +#endif + } + + private async Task SendSIGTERM(TimeSpan timeout) + { +#if OS_WINDOWS + return await SendCtrlSignal(ConsoleCtrlEvent.CTRL_BREAK, timeout); +#else + return await SendSignal(Signals.SIGTERM, timeout); +#endif + } + + private void ProcessExitedHandler(object sender, EventArgs e) + { + if ((_proc.StartInfo.RedirectStandardError || _proc.StartInfo.RedirectStandardOutput) && _asyncStreamReaderCount != 0) + { + _waitingOnStreams = true; + + Task.Run(async () => + { + // Wait 5 seconds and then Cancel/Kill process tree + await Task.Delay(TimeSpan.FromSeconds(5)); + KillProcessTree(); + _processExitedCompletionSource.TrySetResult(true); + _processStandardInWriteCancellationTokenSource.Cancel(); + }); + } + else + { + _processExitedCompletionSource.TrySetResult(true); + _processStandardInWriteCancellationTokenSource.Cancel(); + } + } + + private void StartReadStream(StreamReader reader, ConcurrentQueue dataBuffer) + { + Task.Run(() => + { + while (!reader.EndOfStream) + { + string line = reader.ReadLine(); + if (line != null) + { + dataBuffer.Enqueue(line); + _outputProcessEvent.Set(); + } + } + + Trace.Info("STDOUT/STDERR stream read finished."); + + if (Interlocked.Decrement(ref _asyncStreamReaderCount) == 0 && _waitingOnStreams) + { + _processExitedCompletionSource.TrySetResult(true); + _processStandardInWriteCancellationTokenSource.Cancel(); + } + }); + } + + 
private void StartWriteStream(Channel redirectStandardIn, StreamWriter standardIn, bool keepStandardInOpen) + { + Task.Run(async () => + { + // Write the contents as UTF8 to handle all characters. + var utf8Writer = new StreamWriter(standardIn.BaseStream, new UTF8Encoding(false)); + + while (!_processExitedCompletionSource.Task.IsCompleted) + { + ValueTask dequeueTask = redirectStandardIn.Reader.ReadAsync(_processStandardInWriteCancellationTokenSource.Token); + string input = await dequeueTask; + if (input != null) + { + utf8Writer.WriteLine(input); + utf8Writer.Flush(); + + if (!keepStandardInOpen) + { + Trace.Info("Close STDIN after the first redirect finished."); + standardIn.Close(); + break; + } + } + } + + Trace.Info("STDIN stream write finished."); + }); + } + + private void KillProcessTree() + { +#if OS_WINDOWS + WindowsKillProcessTree(); +#else + NixKillProcessTree(); +#endif + } + + private void DecreaseProcessPriority(Process process) + { +#if OS_LINUX + int oomScoreAdj = 500; + string userOomScoreAdj; + if (process.StartInfo.Environment.TryGetValue("PIPELINE_JOB_OOMSCOREADJ", out userOomScoreAdj)) + { + int userOomScoreAdjParsed; + if (int.TryParse(userOomScoreAdj, out userOomScoreAdjParsed) && userOomScoreAdjParsed >= -1000 && userOomScoreAdjParsed <= 1000) + { + oomScoreAdj = userOomScoreAdjParsed; + } + else + { + Trace.Info($"Invalid PIPELINE_JOB_OOMSCOREADJ ({userOomScoreAdj}). Valid range is -1000:1000. Using default 500."); + } + } + // Values (up to 1000) make the process more likely to be killed under OOM scenario, + // protecting the agent by extension. Default of 500 is likely to get killed, but can + // be adjusted up or down as appropriate. + WriteProcessOomScoreAdj(process.Id, oomScoreAdj); +#endif + } + +#if OS_WINDOWS + private async Task SendCtrlSignal(ConsoleCtrlEvent signal, TimeSpan timeout) + { + Trace.Info($"Sending {signal} to process {_proc.Id}."); + ConsoleCtrlDelegate ctrlEventHandler = new ConsoleCtrlDelegate(ConsoleCtrlHandler); + try + { + if (!FreeConsole()) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + if (!AttachConsole(_proc.Id)) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + if (!SetConsoleCtrlHandler(ctrlEventHandler, true)) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + if (!GenerateConsoleCtrlEvent(signal, 0)) + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + + Trace.Info($"Successfully send {signal} to process {_proc.Id}."); + Trace.Info($"Waiting for process exit or {timeout.TotalSeconds} seconds after {signal} signal fired."); + var completedTask = await Task.WhenAny(Task.Delay(timeout), _processExitedCompletionSource.Task); + if (completedTask == _processExitedCompletionSource.Task) + { + Trace.Info("Process exit successfully."); + return true; + } + else + { + Trace.Info($"Process did not honor {signal} signal within {timeout.TotalSeconds} seconds."); + return false; + } + } + catch (Exception ex) + { + Trace.Info($"{signal} signal doesn't fire successfully."); + Trace.Verbose($"Catch exception during send {signal} event to process {_proc.Id}"); + Trace.Verbose(ex.ToString()); + return false; + } + finally + { + FreeConsole(); + SetConsoleCtrlHandler(ctrlEventHandler, false); + } + } + + private bool ConsoleCtrlHandler(ConsoleCtrlEvent ctrlType) + { + switch (ctrlType) + { + case ConsoleCtrlEvent.CTRL_C: + Trace.Info($"Ignore Ctrl+C to current process."); + // We return True, so the default Ctrl handler will not take action. 
+ return true; + case ConsoleCtrlEvent.CTRL_BREAK: + Trace.Info($"Ignore Ctrl+Break to current process."); + // We return True, so the default Ctrl handler will not take action. + return true; + } + + // If the function handles the control signal, it should return TRUE. + // If it returns FALSE, the next handler function in the list of handlers for this process is used. + return false; + } + + private void WindowsKillProcessTree() + { + var pid = _proc?.Id; + if (pid == null) + { + // process already exit, stop here. + return; + } + + Dictionary processRelationship = new Dictionary(); + Trace.Info($"Scan all processes to find relationship between all processes."); + foreach (Process proc in Process.GetProcesses()) + { + try + { + if (!proc.SafeHandle.IsInvalid) + { + PROCESS_BASIC_INFORMATION pbi = new PROCESS_BASIC_INFORMATION(); + int returnLength = 0; + int queryResult = NtQueryInformationProcess(proc.SafeHandle.DangerousGetHandle(), PROCESSINFOCLASS.ProcessBasicInformation, ref pbi, Marshal.SizeOf(pbi), ref returnLength); + if (queryResult == 0) // == 0 is OK + { + Trace.Verbose($"Process: {proc.Id} is child process of {pbi.InheritedFromUniqueProcessId}."); + processRelationship[proc.Id] = (int)pbi.InheritedFromUniqueProcessId; + } + else + { + throw new Win32Exception(Marshal.GetLastWin32Error()); + } + } + } + catch (Exception ex) + { + // Ignore all exceptions, since KillProcessTree is best effort. + Trace.Verbose("Ignore any catched exception during detecting process relationship."); + Trace.Verbose(ex.ToString()); + } + } + + Trace.Verbose($"Start killing process tree of process '{pid.Value}'."); + Stack processesNeedtoKill = new Stack(); + processesNeedtoKill.Push(new ProcessTerminationInfo(pid.Value, false)); + while (processesNeedtoKill.Count() > 0) + { + ProcessTerminationInfo procInfo = processesNeedtoKill.Pop(); + List childProcessesIds = new List(); + if (!procInfo.ChildPidExpanded) + { + Trace.Info($"Find all child processes of process '{procInfo.Pid}'."); + childProcessesIds = processRelationship.Where(p => p.Value == procInfo.Pid).Select(k => k.Key).ToList(); + } + + if (childProcessesIds.Count > 0) + { + Trace.Info($"Need kill all child processes trees before kill process '{procInfo.Pid}'."); + processesNeedtoKill.Push(new ProcessTerminationInfo(procInfo.Pid, true)); + foreach (var childPid in childProcessesIds) + { + Trace.Info($"Child process '{childPid}' needs be killed first."); + processesNeedtoKill.Push(new ProcessTerminationInfo(childPid, false)); + } + } + else + { + Trace.Info($"Kill process '{procInfo.Pid}'."); + try + { + Process leafProcess = Process.GetProcessById(procInfo.Pid); + try + { + leafProcess.Kill(); + } + catch (InvalidOperationException ex) + { + // The process has already exited + Trace.Verbose("Ignore InvalidOperationException during Process.Kill()."); + Trace.Verbose(ex.ToString()); + } + catch (Win32Exception ex) when (ex.NativeErrorCode == 5) + { + // The associated process could not be terminated + // The process is terminating + // NativeErrorCode 5 means Access Denied + Trace.Verbose("Ignore Win32Exception with NativeErrorCode 5 during Process.Kill()."); + Trace.Verbose(ex.ToString()); + } + catch (Exception ex) + { + // Ignore any additional exception + Trace.Verbose("Ignore additional exceptions during Process.Kill()."); + Trace.Verbose(ex.ToString()); + } + } + catch (ArgumentException ex) + { + // process already gone, nothing needs killed. 
+ Trace.Verbose("Ignore ArgumentException during Process.GetProcessById()."); + Trace.Verbose(ex.ToString()); + } + catch (Exception ex) + { + // Ignore any additional exception + Trace.Verbose("Ignore additional exceptions during Process.GetProcessById()."); + Trace.Verbose(ex.ToString()); + } + } + } + } + + private class ProcessTerminationInfo + { + public ProcessTerminationInfo(int pid, bool expanded) + { + Pid = pid; + ChildPidExpanded = expanded; + } + + public int Pid { get; } + public bool ChildPidExpanded { get; } + } + + private enum ConsoleCtrlEvent + { + CTRL_C = 0, + CTRL_BREAK = 1 + } + + private enum PROCESSINFOCLASS : int + { + ProcessBasicInformation = 0 + }; + + [StructLayout(LayoutKind.Sequential)] + private struct PROCESS_BASIC_INFORMATION + { + public long ExitStatus; + public long PebBaseAddress; + public long AffinityMask; + public long BasePriority; + public long UniqueProcessId; + public long InheritedFromUniqueProcessId; + }; + + + [DllImport("ntdll.dll", SetLastError = true)] + private static extern int NtQueryInformationProcess(IntPtr processHandle, PROCESSINFOCLASS processInformationClass, ref PROCESS_BASIC_INFORMATION processInformation, int processInformationLength, ref int returnLength); + + [DllImport("kernel32.dll", SetLastError = true)] + private static extern bool GenerateConsoleCtrlEvent(ConsoleCtrlEvent sigevent, int dwProcessGroupId); + + [DllImport("kernel32.dll", SetLastError = true)] + private static extern bool FreeConsole(); + + [DllImport("kernel32.dll", SetLastError = true)] + private static extern bool AttachConsole(int dwProcessId); + + [DllImport("kernel32.dll", SetLastError = true)] + private static extern bool SetConsoleCtrlHandler(ConsoleCtrlDelegate HandlerRoutine, bool Add); + + // Delegate type to be used as the Handler Routine for SetConsoleCtrlHandler + private delegate Boolean ConsoleCtrlDelegate(ConsoleCtrlEvent CtrlType); +#else + private async Task SendSignal(Signals signal, TimeSpan timeout) + { + Trace.Info($"Sending {signal} to process {_proc.Id}."); + int errorCode = kill(_proc.Id, (int)signal); + if (errorCode != 0) + { + Trace.Info($"{signal} signal doesn't fire successfully."); + Trace.Info($"Error code: {errorCode}."); + return false; + } + + Trace.Info($"Successfully send {signal} to process {_proc.Id}."); + Trace.Info($"Waiting for process exit or {timeout.TotalSeconds} seconds after {signal} signal fired."); + var completedTask = await Task.WhenAny(Task.Delay(timeout), _processExitedCompletionSource.Task); + if (completedTask == _processExitedCompletionSource.Task) + { + Trace.Info("Process exit successfully."); + return true; + } + else + { + Trace.Info($"Process did not honor {signal} signal within {timeout.TotalSeconds} seconds."); + return false; + } + } + + private void NixKillProcessTree() + { + try + { + if (_proc?.HasExited == false) + { + _proc?.Kill(); + } + } + catch (InvalidOperationException ex) + { + Trace.Info("Ignore InvalidOperationException during Process.Kill()."); + Trace.Info(ex.ToString()); + } + } + +#if OS_LINUX + private void WriteProcessOomScoreAdj(int processId, int oomScoreAdj) + { + try + { + string procFilePath = $"/proc/{processId}/oom_score_adj"; + if (File.Exists(procFilePath)) + { + File.WriteAllText(procFilePath, oomScoreAdj.ToString()); + Trace.Info($"Updated oom_score_adj to {oomScoreAdj} for PID: {processId}."); + } + } + catch (Exception ex) + { + Trace.Info($"Failed to update oom_score_adj for PID: {processId}."); + Trace.Info(ex.ToString()); + } + } +#endif + + private enum 
Signals : int + { + SIGINT = 2, + SIGTERM = 15 + } + + [DllImport("libc", SetLastError = true)] + private static extern int kill(int pid, int sig); +#endif + } + + public sealed class ProcessExitCodeException : Exception + { + public int ExitCode { get; private set; } + + public ProcessExitCodeException(int exitCode, string fileName, string arguments) + : base($"Exit code {exitCode} returned from process: file name '{fileName}', arguments '{arguments}'.") + { + ExitCode = exitCode; + } + } + + public sealed class ProcessDataReceivedEventArgs : EventArgs + { + public ProcessDataReceivedEventArgs(string data) + { + Data = data; + } + + public string Data { get; set; } + } +} diff --git a/src/Runner.Sdk/Runner.Sdk.csproj b/src/Runner.Sdk/Runner.Sdk.csproj new file mode 100644 index 00000000000..f8113085f44 --- /dev/null +++ b/src/Runner.Sdk/Runner.Sdk.csproj @@ -0,0 +1,65 @@ + + + + netcoreapp2.2 + Library + win-x64;win-x86;linux-x64;linux-arm;rhel.6-x64;osx-x64 + true + portable-net45+win8 + NU1701;NU1603 + $(Version) + + + + + + + + + + + + + + portable + + + + OS_WINDOWS;X64;TRACE + + + OS_WINDOWS;X86;TRACE + + + OS_WINDOWS;X64;DEBUG;TRACE + + + OS_WINDOWS;X86;DEBUG;TRACE + + + + OS_OSX;X64;TRACE + + + OS_OSX;DEBUG;X64;TRACE + + + + OS_LINUX;X64;TRACE + + + OS_LINUX;OS_RHEL6;X64;TRACE + + + OS_LINUX;ARM;TRACE + + + OS_LINUX;X64;DEBUG;TRACE + + + OS_LINUX;OS_RHEL6;X64;DEBUG;TRACE + + + OS_LINUX;ARM;DEBUG;TRACE + + diff --git a/src/Runner.Sdk/RunnerClientCertificateManager.cs b/src/Runner.Sdk/RunnerClientCertificateManager.cs new file mode 100644 index 00000000000..a64b86e509d --- /dev/null +++ b/src/Runner.Sdk/RunnerClientCertificateManager.cs @@ -0,0 +1,40 @@ + +using System.Security.Cryptography.X509Certificates; +using GitHub.Services.Common; + +namespace GitHub.Runner.Sdk +{ + public class RunnerCertificateSettings + { + public bool SkipServerCertificateValidation { get; set; } + public string CACertificateFile { get; set; } + public string ClientCertificateFile { get; set; } + public string ClientCertificatePrivateKeyFile { get; set; } + public string ClientCertificateArchiveFile { get; set; } + public string ClientCertificatePassword { get; set; } + public IVssClientCertificateManager VssClientCertificateManager { get; set; } + } + + public class RunnerClientCertificateManager : IVssClientCertificateManager + { + private readonly X509Certificate2Collection _clientCertificates = new X509Certificate2Collection(); + public X509Certificate2Collection ClientCertificates => _clientCertificates; + + public RunnerClientCertificateManager() + { + } + + public RunnerClientCertificateManager(string clientCertificateArchiveFile, string clientCertificatePassword) + { + AddClientCertificate(clientCertificateArchiveFile, clientCertificatePassword); + } + + public void AddClientCertificate(string clientCertificateArchiveFile, string clientCertificatePassword) + { + if (!string.IsNullOrEmpty(clientCertificateArchiveFile)) + { + _clientCertificates.Add(new X509Certificate2(clientCertificateArchiveFile, clientCertificatePassword)); + } + } + } +} diff --git a/src/Runner.Sdk/RunnerWebProxyCore.cs b/src/Runner.Sdk/RunnerWebProxyCore.cs new file mode 100644 index 00000000000..e1fcc729cfa --- /dev/null +++ b/src/Runner.Sdk/RunnerWebProxyCore.cs @@ -0,0 +1,104 @@ +using System; +using System.Collections.Generic; +using System.Net; +using System.Text.RegularExpressions; + +namespace GitHub.Runner.Sdk +{ + public class RunnerWebProxySettings + { + public string ProxyAddress { get; set; } + public string 
ProxyUsername { get; set; } + public string ProxyPassword { get; set; } + public List ProxyBypassList { get; set; } + public IWebProxy WebProxy { get; set; } + } + + public class RunnerWebProxyCore : IWebProxy + { + private string _proxyAddress; + private readonly List _regExBypassList = new List(); + + public ICredentials Credentials { get; set; } + + public RunnerWebProxyCore() + { + } + + public RunnerWebProxyCore(string proxyAddress, string proxyUsername, string proxyPassword, List proxyBypassList) + { + Update(proxyAddress, proxyUsername, proxyPassword, proxyBypassList); + } + + public void Update(string proxyAddress, string proxyUsername, string proxyPassword, List proxyBypassList) + { + _proxyAddress = proxyAddress?.Trim(); + + if (string.IsNullOrEmpty(proxyUsername) || string.IsNullOrEmpty(proxyPassword)) + { + Credentials = CredentialCache.DefaultNetworkCredentials; + } + else + { + Credentials = new NetworkCredential(proxyUsername, proxyPassword); + } + + if (proxyBypassList != null) + { + foreach (string bypass in proxyBypassList) + { + if (string.IsNullOrWhiteSpace(bypass)) + { + continue; + } + else + { + try + { + Regex bypassRegex = new Regex(bypass.Trim(), RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.ECMAScript); + _regExBypassList.Add(bypassRegex); + } + catch (Exception) + { + // eat all exceptions + } + } + } + } + } + + public Uri GetProxy(Uri destination) + { + if (IsBypassed(destination)) + { + return destination; + } + else + { + return new Uri(_proxyAddress); + } + } + + public bool IsBypassed(Uri uri) + { + return string.IsNullOrEmpty(_proxyAddress) || uri.IsLoopback || IsMatchInBypassList(uri); + } + + private bool IsMatchInBypassList(Uri input) + { + string matchUriString = input.IsDefaultPort ? + input.Scheme + "://" + input.Host : + input.Scheme + "://" + input.Host + ":" + input.Port.ToString(); + + foreach (Regex r in _regExBypassList) + { + if (r.IsMatch(matchUriString)) + { + return true; + } + } + + return false; + } + } +} diff --git a/src/Runner.Sdk/Util/ArgUtil.cs b/src/Runner.Sdk/Util/ArgUtil.cs new file mode 100644 index 00000000000..4bf9213fdbe --- /dev/null +++ b/src/Runner.Sdk/Util/ArgUtil.cs @@ -0,0 +1,78 @@ +using System; +using System.IO; + +namespace GitHub.Runner.Sdk +{ + public static class ArgUtil + { + public static void Directory(string directory, string name) + { + ArgUtil.NotNullOrEmpty(directory, name); + if (!System.IO.Directory.Exists(directory)) + { + throw new DirectoryNotFoundException( + message: $"Directory not found: '{directory}'"); + } + } + + public static void Equal(T expected, T actual, string name) + { + if (object.ReferenceEquals(expected, actual)) + { + return; + } + + if (object.ReferenceEquals(expected, null) || + !expected.Equals(actual)) + { + throw new ArgumentOutOfRangeException( + paramName: name, + actualValue: actual, + message: $"{name} does not equal expected value. Expected '{expected}'. 
Actual '{actual}'."); + } + } + + public static void File(string fileName, string name) + { + ArgUtil.NotNullOrEmpty(fileName, name); + if (!System.IO.File.Exists(fileName)) + { + throw new FileNotFoundException( + message: $"File not found: '{fileName}'", + fileName: fileName); + } + } + + public static void NotNull(object value, string name) + { + if (object.ReferenceEquals(value, null)) + { + throw new ArgumentNullException(name); + } + } + + public static void NotNullOrEmpty(string value, string name) + { + if (string.IsNullOrEmpty(value)) + { + throw new ArgumentNullException(name); + } + } + + public static void NotEmpty(Guid value, string name) + { + if (value == Guid.Empty) + { + throw new ArgumentNullException(name); + } + } + + public static void Null(object value, string name) + { + if (!object.ReferenceEquals(value, null)) + { + throw new ArgumentException(message: $"{name} should be null.", paramName: name); + } + } + } +} diff --git a/src/Runner.Sdk/Util/IOUtil.cs b/src/Runner.Sdk/Util/IOUtil.cs new file mode 100644 index 00000000000..45957dee7b3 --- /dev/null +++ b/src/Runner.Sdk/Util/IOUtil.cs @@ -0,0 +1,467 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Security.Cryptography; +using System.Text; +using System.Threading; + +namespace GitHub.Runner.Sdk +{ + public static class IOUtil + { + public static string ExeExtension + { + get + { +#if OS_WINDOWS + return ".exe"; +#else + return string.Empty; +#endif + } + } + + public static StringComparison FilePathStringComparison + { + get + { +#if OS_LINUX + return StringComparison.Ordinal; +#else + return StringComparison.OrdinalIgnoreCase; +#endif + } + } + + public static void SaveObject(object obj, string path) + { + File.WriteAllText(path, StringUtil.ConvertToJson(obj), Encoding.UTF8); + } + + public static T LoadObject(string path) + { + string json = File.ReadAllText(path, Encoding.UTF8); + return StringUtil.ConvertFromJson(json); + } + + public static string GetPathHash(string path) + { + string hashString = path.ToLowerInvariant(); + using (SHA256 sha256hash = SHA256.Create()) + { + byte[] data = sha256hash.ComputeHash(Encoding.UTF8.GetBytes(hashString)); + StringBuilder sBuilder = new StringBuilder(); + for (int i = 0; i < data.Length; i++) + { + sBuilder.Append(data[i].ToString("x2")); + } + + string hash = sBuilder.ToString(); + return hash; + } + } + + public static void Delete(string path, CancellationToken cancellationToken) + { + DeleteDirectory(path, cancellationToken); + DeleteFile(path); + } + + public static void DeleteDirectory(string path, CancellationToken cancellationToken) + { + DeleteDirectory(path, contentsOnly: false, continueOnContentDeleteError: false, cancellationToken: cancellationToken); + } + + public static void DeleteDirectory(string path, bool contentsOnly, bool continueOnContentDeleteError, CancellationToken cancellationToken) + { + ArgUtil.NotNullOrEmpty(path, nameof(path)); + DirectoryInfo directory = new DirectoryInfo(path); + if (!directory.Exists) + { + return; + } + + if (!contentsOnly) + { + // Remove the readonly flag. + RemoveReadOnly(directory); + + // Check if the directory is a reparse point. + if (directory.Attributes.HasFlag(FileAttributes.ReparsePoint)) + { + // Delete the reparse point directory and short-circuit. + directory.Delete(); + return; + } + } + + // Initialize a concurrent stack to store the directories. 
The directories + // cannot be deleted until the files are deleted. + var directories = new ConcurrentStack(); + + if (!contentsOnly) + { + directories.Push(directory); + } + + // Create a new token source for the parallel query. The parallel query should be + // canceled after the first error is encountered. Otherwise the number of exceptions + // could get out of control for a large directory with access denied on every file. + using (var tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken)) + { + try + { + // Recursively delete all files and store all subdirectories. + Enumerate(directory, tokenSource) + .AsParallel() + .WithCancellation(tokenSource.Token) + .ForAll((FileSystemInfo item) => + { + bool success = false; + try + { + // Remove the readonly attribute. + RemoveReadOnly(item); + + // Check if the item is a file. + if (item is FileInfo) + { + // Delete the file. + item.Delete(); + } + else + { + // Check if the item is a directory reparse point. + var subdirectory = item as DirectoryInfo; + ArgUtil.NotNull(subdirectory, nameof(subdirectory)); + if (subdirectory.Attributes.HasFlag(FileAttributes.ReparsePoint)) + { + try + { + // Delete the reparse point. + subdirectory.Delete(); + } + catch (DirectoryNotFoundException) + { + // The target of the reparse point directory has been deleted. + // Therefore the item is no longer a directory and is now a file. + // + // Deletion of reparse point directories happens in parallel. This case can occur + // when reparse point directory FOO points to some other reparse point directory BAR, + // and BAR is deleted after the DirectoryInfo for FOO has already been initialized. + File.Delete(subdirectory.FullName); + } + } + else + { + // Store the directory. + directories.Push(subdirectory); + } + } + + success = true; + } + catch (Exception) when (continueOnContentDeleteError) + { + // ignore any exception when continueOnContentDeleteError is true. + success = true; + } + finally + { + if (!success) + { + tokenSource.Cancel(); // Cancel is thread-safe. + } + } + }); + } + catch (Exception) + { + tokenSource.Cancel(); + throw; + } + } + + // Delete the directories. + foreach (DirectoryInfo dir in directories.OrderByDescending(x => x.FullName.Length)) + { + cancellationToken.ThrowIfCancellationRequested(); + dir.Delete(); + } + } + + public static void DeleteFile(string path) + { + ArgUtil.NotNullOrEmpty(path, nameof(path)); + var file = new FileInfo(path); + if (file.Exists) + { + RemoveReadOnly(file); + file.Delete(); + } + } + + public static void MoveDirectory(string sourceDir, string targetDir, string stagingDir, CancellationToken token) + { + ArgUtil.Directory(sourceDir, nameof(sourceDir)); + ArgUtil.NotNullOrEmpty(targetDir, nameof(targetDir)); + ArgUtil.NotNullOrEmpty(stagingDir, nameof(stagingDir)); + + // delete existing stagingDir + DeleteDirectory(stagingDir, token); + + // make sure parent dir of stagingDir exist + Directory.CreateDirectory(Path.GetDirectoryName(stagingDir)); + + // move source to staging + Directory.Move(sourceDir, stagingDir); + + // delete existing targetDir + DeleteDirectory(targetDir, token); + + // make sure parent dir of targetDir exist + Directory.CreateDirectory(Path.GetDirectoryName(targetDir)); + + // move staging to target + Directory.Move(stagingDir, targetDir); + } + + /// + /// Given a path and directory, return the path relative to the directory. If the path is not + /// under the directory the path is returned un modified. 
Examples: + /// MakeRelative(@"d:\src\project\foo.cpp", @"d:\src") -> @"project\foo.cpp" + /// MakeRelative(@"d:\src\project\foo.cpp", @"d:\specs") -> @"d:\src\project\foo.cpp" + /// MakeRelative(@"d:\src\project\foo.cpp", @"d:\src\proj") -> @"d:\src\project\foo.cpp" + /// + /// Safe for remote paths. Does not access the local disk. + /// Path to make relative. + /// Folder to make it relative to. + /// Relative path. + public static string MakeRelative(string path, string folder) + { + ArgUtil.NotNullOrEmpty(path, nameof(path)); + ArgUtil.NotNull(folder, nameof(folder)); + + // Replace all Path.AltDirectorySeparatorChar with Path.DirectorySeparatorChar from both inputs + path = path.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar); + folder = folder.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar); + + // Check if the dir is a prefix of the path (if not, it isn't relative at all). + if (!path.StartsWith(folder, IOUtil.FilePathStringComparison)) + { + return path; + } + + // Dir is a prefix of the path, if they are the same length then the relative path is empty. + if (path.Length == folder.Length) + { + return string.Empty; + } + + // If the dir ended in a '\\' (like d:\) or '/' (like user/bin/) then we have a relative path. + if (folder.Length > 0 && folder[folder.Length - 1] == Path.DirectorySeparatorChar) + { + return path.Substring(folder.Length); + } + // The next character needs to be a '\\' or they aren't really relative. + else if (path[folder.Length] == Path.DirectorySeparatorChar) + { + return path.Substring(folder.Length + 1); + } + else + { + return path; + } + } + + public static string ResolvePath(String rootPath, String relativePath) + { + ArgUtil.NotNullOrEmpty(rootPath, nameof(rootPath)); + ArgUtil.NotNullOrEmpty(relativePath, nameof(relativePath)); + + if (!Path.IsPathRooted(rootPath)) + { + throw new ArgumentException($"{rootPath} should be a rooted path."); + } + + if (relativePath.IndexOfAny(Path.GetInvalidPathChars()) > -1) + { + throw new InvalidOperationException($"{relativePath} contains invalid path characters."); + } + else if (Path.GetFileName(relativePath).IndexOfAny(Path.GetInvalidFileNameChars()) > -1) + { + throw new InvalidOperationException($"{relativePath} contains invalid folder name characters."); + } + else if (Path.IsPathRooted(relativePath)) + { + throw new InvalidOperationException($"{relativePath} can not be a rooted path."); + } + else + { + rootPath = rootPath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + relativePath = relativePath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + + // Root the path + relativePath = String.Concat(rootPath, Path.AltDirectorySeparatorChar, relativePath); + + // Collapse ".." directories with their parent, and skip "." directories. 
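+ // Illustrative walk-through (hypothetical inputs): with rootPath "/work" and relativePath
+ // "foo/./bar/../baz", the combined "/work/foo/./bar/../baz" is split into segments and
+ // scanned right-to-left below: "baz" is kept, ".." raises the skip counter, "bar" is
+ // consumed by that skip, "." is dropped, and "foo"/"work" are kept, giving "/work/foo/baz".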
+ String[] split = relativePath.Split(new[] { Path.AltDirectorySeparatorChar }, StringSplitOptions.RemoveEmptyEntries); + var segments = new Stack(split.Length); + Int32 skip = 0; + for (Int32 i = split.Length - 1; i >= 0; i--) + { + String segment = split[i]; + if (String.Equals(segment, ".", StringComparison.Ordinal)) + { + continue; + } + else if (String.Equals(segment, "..", StringComparison.Ordinal)) + { + skip++; + } + else if (skip > 0) + { + skip--; + } + else + { + segments.Push(segment); + } + } + + if (skip > 0) + { + throw new InvalidOperationException($"The file path {relativePath} is invalid"); + } + +#if OS_WINDOWS + if (segments.Count > 1) + { + return String.Join(Path.DirectorySeparatorChar, segments); + } + else + { + return segments.Pop() + Path.DirectorySeparatorChar; + } +#else + return Path.DirectorySeparatorChar + String.Join(Path.DirectorySeparatorChar, segments); +#endif + } + } + + public static void CopyDirectory(string source, string target, CancellationToken cancellationToken) + { + // Validate args. + ArgUtil.Directory(source, nameof(source)); + ArgUtil.NotNullOrEmpty(target, nameof(target)); + ArgUtil.NotNull(cancellationToken, nameof(cancellationToken)); + cancellationToken.ThrowIfCancellationRequested(); + + // Create the target directory. + Directory.CreateDirectory(target); + + // Get the file contents of the directory to copy. + DirectoryInfo sourceDir = new DirectoryInfo(source); + foreach (FileInfo sourceFile in sourceDir.GetFiles() ?? new FileInfo[0]) + { + // Check if the file already exists. + cancellationToken.ThrowIfCancellationRequested(); + FileInfo targetFile = new FileInfo(Path.Combine(target, sourceFile.Name)); + if (!targetFile.Exists || + sourceFile.Length != targetFile.Length || + sourceFile.LastWriteTime != targetFile.LastWriteTime) + { + // Copy the file. + sourceFile.CopyTo(targetFile.FullName, true); + } + } + + // Copy the subdirectories. + foreach (DirectoryInfo subDir in sourceDir.GetDirectories() ?? new DirectoryInfo[0]) + { + CopyDirectory( + source: subDir.FullName, + target: Path.Combine(target, subDir.Name), + cancellationToken: cancellationToken); + } + } + + public static void ValidateExecutePermission(string directory) + { + ArgUtil.Directory(directory, nameof(directory)); + string dir = directory; + string failsafeString = Environment.GetEnvironmentVariable("AGENT_TEST_VALIDATE_EXECUTE_PERMISSIONS_FAILSAFE"); + int failsafe; + if (string.IsNullOrEmpty(failsafeString) || !int.TryParse(failsafeString, out failsafe)) + { + failsafe = 100; + } + + for (int i = 0; i < failsafe; i++) + { + try + { + Directory.EnumerateFileSystemEntries(dir).FirstOrDefault(); + } + catch (UnauthorizedAccessException ex) + { + // Permission to read the directory contents is required for '{0}' and each directory up the hierarchy. {1} + string message = $"Permission to read the directory contents is required for '{directory}' and each directory up the hierarchy. {ex.Message}"; + throw new UnauthorizedAccessException(message, ex); + } + + dir = Path.GetDirectoryName(dir); + if (string.IsNullOrEmpty(dir)) + { + return; + } + } + + // This should never happen. + throw new NotSupportedException($"Unable to validate execute permissions for directory '{directory}'. Exceeded maximum iterations."); + } + + /// + /// Recursively enumerates a directory without following directory reparse points. 
+ /// + private static IEnumerable Enumerate(DirectoryInfo directory, CancellationTokenSource tokenSource) + { + ArgUtil.NotNull(directory, nameof(directory)); + ArgUtil.Equal(false, directory.Attributes.HasFlag(FileAttributes.ReparsePoint), nameof(directory.Attributes.HasFlag)); + + // Push the directory onto the processing stack. + var directories = new Stack(new[] { directory }); + while (directories.Count > 0) + { + // Pop the next directory. + directory = directories.Pop(); + foreach (FileSystemInfo item in directory.GetFileSystemInfos()) + { + // Push non-reparse-point directories onto the processing stack. + directory = item as DirectoryInfo; + if (directory != null && + !item.Attributes.HasFlag(FileAttributes.ReparsePoint)) + { + directories.Push(directory); + } + + // Then yield the directory. Otherwise there is a race condition when this method attempts to initialize + // the Attributes and the caller is deleting the reparse point in parallel (FileNotFoundException). + yield return item; + } + } + } + + private static void RemoveReadOnly(FileSystemInfo item) + { + ArgUtil.NotNull(item, nameof(item)); + if (item.Attributes.HasFlag(FileAttributes.ReadOnly)) + { + item.Attributes = item.Attributes & ~FileAttributes.ReadOnly; + } + } + } +} diff --git a/src/Runner.Sdk/Util/PathUtil.cs b/src/Runner.Sdk/Util/PathUtil.cs new file mode 100644 index 00000000000..ac1704b5c8b --- /dev/null +++ b/src/Runner.Sdk/Util/PathUtil.cs @@ -0,0 +1,36 @@ +using System; +using System.IO; + +namespace GitHub.Runner.Sdk +{ + public static class PathUtil + { +#if OS_WINDOWS + public static readonly string PathVariable = "Path"; +#else + public static readonly string PathVariable = "PATH"; +#endif + + public static string PrependPath(string path, string currentPath) + { + ArgUtil.NotNullOrEmpty(path, nameof(path)); + if (string.IsNullOrEmpty(currentPath)) + { + // Careful not to add a trailing separator if the PATH is empty. + // On OSX/Linux, a trailing separator indicates that "current directory" + // is added to the PATH, which is considered a security risk. + return path; + } + + // Not prepend path if it is already the first path in %PATH% + if (currentPath.StartsWith(path + Path.PathSeparator, IOUtil.FilePathStringComparison)) + { + return currentPath; + } + else + { + return path + Path.PathSeparator + currentPath; + } + } + } +} diff --git a/src/Runner.Sdk/Util/StringUtil.cs b/src/Runner.Sdk/Util/StringUtil.cs new file mode 100644 index 00000000000..740686c23eb --- /dev/null +++ b/src/Runner.Sdk/Util/StringUtil.cs @@ -0,0 +1,126 @@ +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Reflection; +using System.Text; + +namespace GitHub.Runner.Sdk +{ + public static class StringUtil + { + private static readonly object[] s_defaultFormatArgs = new object[] { null }; + private static Lazy s_serializerSettings = new Lazy(() => + { + var settings = new VssJsonMediaTypeFormatter().SerializerSettings; + settings.DateParseHandling = DateParseHandling.None; + settings.FloatParseHandling = FloatParseHandling.Double; + return settings; + }); + + static StringUtil() + { +#if OS_WINDOWS + // By default, only Unicode encodings, ASCII, and code page 28591 are supported. + // This line is required to support the full set of encodings that were included + // in Full .NET prior to 4.6. 
+ // + // For example, on an en-US box, this is required for loading the encoding for the + // default console output code page '437'. Without loading the correct encoding for + // code page IBM437, some characters cannot be translated correctly, e.g. write 'ç' + // from powershell.exe. + Encoding.RegisterProvider(CodePagesEncodingProvider.Instance); +#endif + } + + public static T ConvertFromJson(string value) + { + return JsonConvert.DeserializeObject(value, s_serializerSettings.Value); + } + + /// + /// Convert String to boolean, valid true string: "1", "true", "$true", valid false string: "0", "false", "$false". + /// + /// value to convert. + /// default result when value is null or empty or not a valid true/false string. + /// + public static bool ConvertToBoolean(string value, bool defaultValue = false) + { + if (string.IsNullOrEmpty(value)) + { + return defaultValue; + } + + switch (value.ToLowerInvariant()) + { + case "1": + case "true": + case "$true": + return true; + case "0": + case "false": + case "$false": + return false; + default: + return defaultValue; + } + } + + public static string ConvertToJson(object obj, Formatting formatting = Formatting.Indented) + { + return JsonConvert.SerializeObject(obj, formatting, s_serializerSettings.Value); + } + + public static void EnsureRegisterEncodings() + { + // The static constructor should have registered the required encodings. + } + + public static string Format(string format, params object[] args) + { + return Format(CultureInfo.InvariantCulture, format, args); + } + + public static Encoding GetSystemEncoding() + { +#if OS_WINDOWS + // The static constructor should have registered the required encodings. + // Code page 0 is equivalent to the current system default (i.e. CP_ACP). + // E.g. code page 1252 on an en-US box. + return Encoding.GetEncoding(0); +#else + throw new NotSupportedException(nameof(GetSystemEncoding)); // Should never reach here. +#endif + } + + private static string Format(CultureInfo culture, string format, params object[] args) + { + try + { + // 1) Protect against argument null exception for the format parameter. + // 2) Protect against argument null exception for the args parameter. + // 3) Coalesce null or empty args with an array containing one null element. + // This protects against format exceptions where string.Format thinks + // that not enough arguments were supplied, even though the intended arg + // literally is null or an empty array. + return string.Format( + culture, + format ?? string.Empty, + args == null || args.Length == 0 ? s_defaultFormatArgs : args); + } + catch (FormatException) + { + // TODO: Log that string format failed. Consider moving this into a context base class if that's the only place it's used. Then the current trace scope would be available as well. 
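+ // Illustration of the degraded output (hypothetical values): a malformed format string
+ // such as Format("{0} {1", "abc") cannot be expanded, so the fallback below returns the
+ // raw format followed by the joined args, i.e. "{0} {1 abc", instead of surfacing the
+ // FormatException to the caller.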
+ if (args != null) + { + return string.Format(culture, "{0} {1}", format, string.Join(", ", args)); + } + + return format; + } + } + } +} diff --git a/src/Runner.Sdk/Util/UrlUtil.cs b/src/Runner.Sdk/Util/UrlUtil.cs new file mode 100644 index 00000000000..30b4f82fea9 --- /dev/null +++ b/src/Runner.Sdk/Util/UrlUtil.cs @@ -0,0 +1,37 @@ +using System; + +namespace GitHub.Runner.Sdk +{ + public static class UrlUtil + { + public static Uri GetCredentialEmbeddedUrl(Uri baseUrl, string username, string password) + { + ArgUtil.NotNull(baseUrl, nameof(baseUrl)); + + // return baseurl when there is no username and password + if (string.IsNullOrEmpty(username) && string.IsNullOrEmpty(password)) + { + return baseUrl; + } + + UriBuilder credUri = new UriBuilder(baseUrl); + + // ensure we have a username, uribuild will throw if username is empty but password is not. + if (string.IsNullOrEmpty(username)) + { + username = "emptyusername"; + } + + // escape chars in username for uri + credUri.UserName = Uri.EscapeDataString(username); + + // escape chars in password for uri + if (!string.IsNullOrEmpty(password)) + { + credUri.Password = Uri.EscapeDataString(password); + } + + return credUri.Uri; + } + } +} diff --git a/src/Runner.Sdk/Util/VssUtil.cs b/src/Runner.Sdk/Util/VssUtil.cs new file mode 100644 index 00000000000..7890f3ef946 --- /dev/null +++ b/src/Runner.Sdk/Util/VssUtil.cs @@ -0,0 +1,99 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Net.Http; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using GitHub.Services.OAuth; +using System.Net.Http.Headers; +using System.Runtime.InteropServices; +using System.Net; + +namespace GitHub.Runner.Sdk +{ + public static class VssUtil + { + public static void InitializeVssClientSettings(ProductInfoHeaderValue additionalUserAgent, IWebProxy proxy, IVssClientCertificateManager clientCert) + { + var headerValues = new List(); + headerValues.Add(additionalUserAgent); + headerValues.Add(new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})")); + + if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0) + { + headerValues.AddRange(VssClientHttpRequestSettings.Default.UserAgent); + } + + VssClientHttpRequestSettings.Default.UserAgent = headerValues; + VssClientHttpRequestSettings.Default.ClientCertificateManager = clientCert; +#if OS_LINUX || OS_OSX + // The .NET Core 2.1 runtime switched its HTTP default from HTTP 1.1 to HTTP 2. + // This causes problems with some versions of the Curl handler. + // See GitHub issue https://github.com/dotnet/corefx/issues/32376 + VssClientHttpRequestSettings.Default.UseHttp11 = true; +#endif + + VssHttpMessageHandler.DefaultWebProxy = proxy; + } + + public static VssConnection CreateConnection(Uri serverUri, VssCredentials credentials, IEnumerable additionalDelegatingHandler = null, TimeSpan? timeout = null) + { + VssClientHttpRequestSettings settings = VssClientHttpRequestSettings.Default.Clone(); + + int maxRetryRequest; + if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTP_RETRY") ?? string.Empty, out maxRetryRequest)) + { + maxRetryRequest = 3; + } + + // make sure MaxRetryRequest in range [3, 10] + settings.MaxRetryRequest = Math.Min(Math.Max(maxRetryRequest, 3), 10); + + if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTP_TIMEOUT") ?? 
string.Empty, out int httpRequestTimeoutSeconds)) + { + settings.SendTimeout = timeout ?? TimeSpan.FromSeconds(100); + } + else + { + // prefer environment variable + settings.SendTimeout = TimeSpan.FromSeconds(Math.Min(Math.Max(httpRequestTimeoutSeconds, 100), 1200)); + } + + + // Remove Invariant from the list of accepted languages. + // + // The constructor of VssHttpRequestSettings (base class of VssClientHttpRequestSettings) adds the current + // UI culture to the list of accepted languages. The UI culture will be Invariant on OSX/Linux when the + // LANG environment variable is not set when the program starts. If Invariant is in the list of accepted + // languages, then "System.ArgumentException: The value cannot be null or empty." will be thrown when the + // settings are applied to an HttpRequestMessage. + settings.AcceptLanguages.Remove(CultureInfo.InvariantCulture); + + VssConnection connection = new VssConnection(serverUri, new VssHttpMessageHandler(credentials, settings), additionalDelegatingHandler); + return connection; + } + + public static VssCredentials GetVssCredential(ServiceEndpoint serviceEndpoint) + { + ArgUtil.NotNull(serviceEndpoint, nameof(serviceEndpoint)); + ArgUtil.NotNull(serviceEndpoint.Authorization, nameof(serviceEndpoint.Authorization)); + ArgUtil.NotNullOrEmpty(serviceEndpoint.Authorization.Scheme, nameof(serviceEndpoint.Authorization.Scheme)); + + if (serviceEndpoint.Authorization.Parameters.Count == 0) + { + throw new ArgumentOutOfRangeException(nameof(serviceEndpoint)); + } + + VssCredentials credentials = null; + string accessToken; + if (serviceEndpoint.Authorization.Scheme == EndpointAuthorizationSchemes.OAuth && + serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out accessToken)) + { + credentials = new VssCredentials(null, new VssOAuthAccessTokenCredential(accessToken), CredentialPromptType.DoNotPrompt); + } + + return credentials; + } + } +} diff --git a/src/Runner.Sdk/Util/WhichUtil.cs b/src/Runner.Sdk/Util/WhichUtil.cs new file mode 100644 index 00000000000..d9e2ac5ef5b --- /dev/null +++ b/src/Runner.Sdk/Util/WhichUtil.cs @@ -0,0 +1,120 @@ +using System; +using System.IO; +using System.Linq; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Sdk +{ + public static class WhichUtil + { + public static string Which(string command, bool require = false, ITraceWriter trace = null) + { + ArgUtil.NotNullOrEmpty(command, nameof(command)); + trace?.Info($"Which: '{command}'"); + string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable); + if (string.IsNullOrEmpty(path)) + { + trace?.Info("PATH environment variable not defined."); + path = path ?? string.Empty; + } + + string[] pathSegments = path.Split(new Char[] { Path.PathSeparator }, StringSplitOptions.RemoveEmptyEntries); + for (int i = 0; i < pathSegments.Length; i++) + { + pathSegments[i] = Environment.ExpandEnvironmentVariables(pathSegments[i]); + } + + foreach (string pathSegment in pathSegments) + { + if (!string.IsNullOrEmpty(pathSegment) && Directory.Exists(pathSegment)) + { + string[] matches = null; +#if OS_WINDOWS + string pathExt = Environment.GetEnvironmentVariable("PATHEXT"); + if (string.IsNullOrEmpty(pathExt)) + { + // XP's system default value for PATHEXT system variable + pathExt = ".com;.exe;.bat;.cmd;.vbs;.vbe;.js;.jse;.wsf;.wsh"; + } + + string[] pathExtSegments = pathExt.Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries); + + // if command already has an extension. 
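+ // Rough illustration (hypothetical command names): Which("git.exe") takes the exact-match
+ // branch below, while Which("git") falls through to the "git.*" wildcard search and then
+ // re-checks each PATHEXT extension against the matches.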
+ if (pathExtSegments.Any(ext => command.EndsWith(ext, StringComparison.OrdinalIgnoreCase))) + { + try + { + matches = Directory.GetFiles(pathSegment, command); + } + catch (UnauthorizedAccessException ex) + { + trace?.Info("Ignore UnauthorizedAccess exception during Which."); + trace?.Verbose(ex.ToString()); + } + + if (matches != null && matches.Length > 0) + { + trace?.Info($"Location: '{matches.First()}'"); + return matches.First(); + } + } + else + { + string searchPattern; + searchPattern = StringUtil.Format($"{command}.*"); + try + { + matches = Directory.GetFiles(pathSegment, searchPattern); + } + catch (UnauthorizedAccessException ex) + { + trace?.Info("Ignore UnauthorizedAccess exception during Which."); + trace?.Verbose(ex.ToString()); + } + + if (matches != null && matches.Length > 0) + { + // add extension. + for (int i = 0; i < pathExtSegments.Length; i++) + { + string fullPath = Path.Combine(pathSegment, $"{command}{pathExtSegments[i]}"); + if (matches.Any(p => p.Equals(fullPath, StringComparison.OrdinalIgnoreCase))) + { + trace?.Info($"Location: '{fullPath}'"); + return fullPath; + } + } + } + } +#else + try + { + matches = Directory.GetFiles(pathSegment, command); + } + catch (UnauthorizedAccessException ex) + { + trace?.Info("Ignore UnauthorizedAccess exception during Which."); + trace?.Verbose(ex.ToString()); + } + + if (matches != null && matches.Length > 0) + { + trace?.Info($"Location: '{matches.First()}'"); + return matches.First(); + } +#endif + } + } + + trace?.Info("Not found."); + if (require) + { + throw new FileNotFoundException( + message: $"File not found: '{command}'", + fileName: command); + } + + return null; + } + } +} diff --git a/src/Runner.Service/Windows/App.config b/src/Runner.Service/Windows/App.config new file mode 100644 index 00000000000..8e15646352e --- /dev/null +++ b/src/Runner.Service/Windows/App.config @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/src/Runner.Service/Windows/FinalPublicKey.snk b/src/Runner.Service/Windows/FinalPublicKey.snk new file mode 100644 index 0000000000000000000000000000000000000000..110b59c7b0d27388353dcf4116f721595f473e58 GIT binary patch literal 160 zcmV;R0AK$ABme*efB*oL000060ssI2Bme+XQ$aBR1ONa500968(fU`!uG#RTE`+KN zuKf+^=>2N!kB9pMc5H)8nUWr|JLj6&)!f0|n$k8CAp(#KayILlN=pn$R@96PlTucm;!K;}lU1BV%Wh@=~);)AxZ!P8VeqOH+#FjlK9EuV{ OWf&lBz>_phTGEsG5JRQ_ literal 0 HcmV?d00001 diff --git a/src/Runner.Service/Windows/Program.cs b/src/Runner.Service/Windows/Program.cs new file mode 100644 index 00000000000..c34cb14cbbb --- /dev/null +++ b/src/Runner.Service/Windows/Program.cs @@ -0,0 +1,55 @@ +using System; +using System.ServiceProcess; +using System.Diagnostics; +using System.ComponentModel; + +namespace RunnerService +{ + static class Program + { + /// + /// The main entry point for the application. + /// + static int Main(String[] args) + { + if (args != null && args.Length == 1 && args[0].Equals("init", StringComparison.InvariantCultureIgnoreCase)) + { + // TODO: LOC all strings. 
+ if (!EventLog.Exists("Application")) + { + Console.WriteLine("[ERROR] Application event log doesn't exist on current machine."); + return 1; + } + + EventLog applicationLog = new EventLog("Application"); + if (applicationLog.OverflowAction == OverflowAction.DoNotOverwrite) + { + Console.WriteLine("[WARNING] The retention policy for Application event log is set to \"Do not overwrite events\"."); + Console.WriteLine("[WARNING] Make sure manually clear logs as needed, otherwise RunnerService will stop writing output to event log."); + } + + try + { + EventLog.WriteEntry(RunnerService.EventSourceName, "create event log trace source for actions-runner service", EventLogEntryType.Information, 100); + return 0; + } + catch (Win32Exception ex) + { + Console.WriteLine("[ERROR] Unable to create '{0}' event source under 'Application' event log.", RunnerService.EventSourceName); + Console.WriteLine("[ERROR] {0}",ex.Message); + Console.WriteLine("[ERROR] Error Code: {0}", ex.ErrorCode); + return 1; + } + } + + ServiceBase[] ServicesToRun; + ServicesToRun = new ServiceBase[] + { + new RunnerService(args.Length > 0 ? args[0] : "ActionsRunnerService") + }; + ServiceBase.Run(ServicesToRun); + + return 0; + } + } +} diff --git a/src/Runner.Service/Windows/Properties/AssemblyInfo.cs b/src/Runner.Service/Windows/Properties/AssemblyInfo.cs new file mode 100644 index 00000000000..d3749bd52a2 --- /dev/null +++ b/src/Runner.Service/Windows/Properties/AssemblyInfo.cs @@ -0,0 +1,36 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; + +// General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("AgentService")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] +[assembly: AssemblyCompany("")] +[assembly: AssemblyProduct("AgentService")] +[assembly: AssemblyCopyright("Copyright © 2016")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("d12ebd71-0464-46d0-8394-40bcfba0a6f2")] + +// Version information for an assembly consists of the following four values: +// +// Major Version +// Minor Version +// Build Number +// Revision +// +// You can specify all the values or you can default the Build and Revision Numbers +// by using the '*' as shown below: +// [assembly: AssemblyVersion("1.0.*")] +[assembly: AssemblyVersion("1.0.0.0")] +[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/src/Runner.Service/Windows/Resource.Designer.cs b/src/Runner.Service/Windows/Resource.Designer.cs new file mode 100644 index 00000000000..503aca55614 --- /dev/null +++ b/src/Runner.Service/Windows/Resource.Designer.cs @@ -0,0 +1,144 @@ +//------------------------------------------------------------------------------ +// +// This code was generated by a tool. +// Runtime Version:4.0.30319.42000 +// +// Changes to this file may cause incorrect behavior and will be lost if +// the code is regenerated. 
+// +//------------------------------------------------------------------------------ + +namespace RunnerService { + using System; + + + /// + /// A strongly-typed resource class, for looking up localized strings, etc. + /// + // This class was auto-generated by the StronglyTypedResourceBuilder + // class via a tool like ResGen or Visual Studio. + // To add or remove a member, edit your .ResX file then rerun ResGen + // with the /str option, or rebuild your VS project. + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")] + [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] + [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] + internal class Resource { + + private static global::System.Resources.ResourceManager resourceMan; + + private static global::System.Globalization.CultureInfo resourceCulture; + + [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] + internal Resource() { + } + + /// + /// Returns the cached ResourceManager instance used by this class. + /// + [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] + internal static global::System.Resources.ResourceManager ResourceManager { + get { + if (object.ReferenceEquals(resourceMan, null)) { + global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("RunnerService.Resource", typeof(Resource).Assembly); + resourceMan = temp; + } + return resourceMan; + } + } + + /// + /// Overrides the current thread's CurrentUICulture property for all + /// resource lookups using this strongly typed resource class. + /// + [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] + internal static global::System.Globalization.CultureInfo Culture { + get { + return resourceCulture; + } + set { + resourceCulture = value; + } + } + + /// + /// Looks up a localized string similar to Runner listener exit with retryable error, re-launch runner in 5 seconds.. + /// + internal static string RunnerExitWithError { + get { + return ResourceManager.GetString("RunnerExitWithError", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Runner listener exit with 0 return code, stop the service, no retry needed.. + /// + internal static string RunnerExitWithoutError { + get { + return ResourceManager.GetString("RunnerExitWithoutError", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Runner listener exit with terminated error, stop the service, no retry needed.. + /// + internal static string RunnerExitWithTerminatedError { + get { + return ResourceManager.GetString("RunnerExitWithTerminatedError", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Runner listener exit with undefined return code, re-launch runner in 5 seconds.. + /// + internal static string RunnerExitWithUndefinedReturnCode { + get { + return ResourceManager.GetString("RunnerExitWithUndefinedReturnCode", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Runner listener update failed, stop the service.. 
+ /// + internal static string RunnerUpdateFailed { + get { + return ResourceManager.GetString("RunnerUpdateFailed", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Runner listener exit because of updating, re-launch runner in 5 seconds.. + /// + internal static string RunnerUpdateInProcess { + get { + return ResourceManager.GetString("RunnerUpdateInProcess", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Runner listener has been updated to latest, restart the service to update the servicehost itself.. + /// + internal static string RunnerUpdateRestartNeeded { + get { + return ResourceManager.GetString("RunnerUpdateRestartNeeded", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Runner listener has been updated to latest, re-launch runner in 5 seconds.. + /// + internal static string RunnerUpdateSucceed { + get { + return ResourceManager.GetString("RunnerUpdateSucceed", resourceCulture); + } + } + + /// + /// Looks up a localized string similar to Crash servicehost to trigger SCM restart the serivce.. + /// + internal static string CrashServiceHost { + get { + return ResourceManager.GetString("CrashServiceHost", resourceCulture); + } + } + } +} diff --git a/src/Runner.Service/Windows/Resource.resx b/src/Runner.Service/Windows/Resource.resx new file mode 100644 index 00000000000..56c2c16132d --- /dev/null +++ b/src/Runner.Service/Windows/Resource.resx @@ -0,0 +1,147 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + text/microsoft-resx + + + 2.0 + + + System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 + + + Runner listener exit with 0 return code, stop the service, no retry needed. + + + Runner listener exit with terminated error, stop the service, no retry needed. + + + Runner listener exit with retryable error, re-launch runner in 5 seconds. + + + Runner listener exit because of updating, re-launch runner in 5 seconds. + + + Runner listener has been updated to latest, re-launch runner in 5 seconds. + + + Runner listener update failed, stop the service. + + + Runner listener has been updated to latest, restart the service to update the servicehost itself. + + + Crash servicehost to trigger SCM restart the serivce. + + + Runner listener exit with undefined return code, re-launch runner in 5 seconds. + + \ No newline at end of file diff --git a/src/Runner.Service/Windows/RunnerService.Designer.cs b/src/Runner.Service/Windows/RunnerService.Designer.cs new file mode 100644 index 00000000000..9523bdd8c37 --- /dev/null +++ b/src/Runner.Service/Windows/RunnerService.Designer.cs @@ -0,0 +1,37 @@ +namespace RunnerService +{ + partial class RunnerService + { + /// + /// Required designer variable. + /// + private System.ComponentModel.IContainer components = null; + + /// + /// Clean up any resources being used. + /// + /// true if managed resources should be disposed; otherwise, false. + protected override void Dispose(bool disposing) + { + if (disposing && (components != null)) + { + components.Dispose(); + } + base.Dispose(disposing); + } + + #region Component Designer generated code + + /// + /// Required method for Designer support - do not modify + /// the contents of this method with the code editor. 
+ /// + private void InitializeComponent() + { + components = new System.ComponentModel.Container(); + this.ServiceName = "Service1"; + } + + #endregion + } +} diff --git a/src/Runner.Service/Windows/RunnerService.cs b/src/Runner.Service/Windows/RunnerService.cs new file mode 100644 index 00000000000..ca2652f624e --- /dev/null +++ b/src/Runner.Service/Windows/RunnerService.cs @@ -0,0 +1,354 @@ +using System; +using System.ComponentModel; +using System.Diagnostics; +using System.Globalization; +using System.IO; +using System.Reflection; +using System.Runtime.InteropServices; +using System.ServiceProcess; +using System.Threading; +using System.Threading.Tasks; + +namespace RunnerService +{ + public partial class RunnerService : ServiceBase + { + public const string EventSourceName = "ActionsRunnerService"; + private const int CTRL_C_EVENT = 0; + private const int CTRL_BREAK_EVENT = 1; + private bool _restart = false; + private Process RunnerListener { get; set; } + private bool Stopping { get; set; } + private object ServiceLock { get; set; } + private Task RunningLoop { get; set; } + + public RunnerService(string serviceName) + { + ServiceLock = new Object(); + InitializeComponent(); + base.ServiceName = serviceName; + } + + protected override void OnStart(string[] args) + { + RunningLoop = Task.Run( + () => + { + try + { + bool stopping; + WriteInfo("Starting Actions Runner Service"); + TimeSpan timeBetweenRetries = TimeSpan.FromSeconds(5); + + lock (ServiceLock) + { + stopping = Stopping; + } + + while (!stopping) + { + WriteInfo("Starting Actions Runner listener"); + lock (ServiceLock) + { + RunnerListener = CreateRunnerListener(); + RunnerListener.OutputDataReceived += RunnerListener_OutputDataReceived; + RunnerListener.ErrorDataReceived += RunnerListener_ErrorDataReceived; + RunnerListener.Start(); + RunnerListener.BeginOutputReadLine(); + RunnerListener.BeginErrorReadLine(); + } + + RunnerListener.WaitForExit(); + int exitCode = RunnerListener.ExitCode; + + // exit code 0 and 1 need stop service + // exit code 2 and 3 need restart runner + switch (exitCode) + { + case 0: + Stopping = true; + WriteInfo(Resource.RunnerExitWithoutError); + break; + case 1: + Stopping = true; + WriteInfo(Resource.RunnerExitWithTerminatedError); + break; + case 2: + WriteInfo(Resource.RunnerExitWithError); + break; + case 3: + WriteInfo(Resource.RunnerUpdateInProcess); + var updateResult = HandleRunnerUpdate(); + if (updateResult == RunnerUpdateResult.Succeed) + { + WriteInfo(Resource.RunnerUpdateSucceed); + } + else if (updateResult == RunnerUpdateResult.Failed) + { + WriteInfo(Resource.RunnerUpdateFailed); + Stopping = true; + } + else if (updateResult == RunnerUpdateResult.SucceedNeedRestart) + { + WriteInfo(Resource.RunnerUpdateRestartNeeded); + _restart = true; + ExitCode = int.MaxValue; + Stop(); + } + break; + default: + WriteInfo(Resource.RunnerExitWithUndefinedReturnCode); + break; + } + + if (Stopping) + { + ExitCode = exitCode; + Stop(); + } + else + { + // wait for few seconds before restarting the process + Thread.Sleep(timeBetweenRetries); + } + + lock (ServiceLock) + { + RunnerListener.OutputDataReceived -= RunnerListener_OutputDataReceived; + RunnerListener.ErrorDataReceived -= RunnerListener_ErrorDataReceived; + RunnerListener.Dispose(); + RunnerListener = null; + stopping = Stopping; + } + } + } + catch (Exception exception) + { + WriteException(exception); + ExitCode = 99; + Stop(); + } + }); + } + + private void RunnerListener_ErrorDataReceived(object sender, DataReceivedEventArgs e) + 
{ + if (!string.IsNullOrEmpty(e.Data)) + { + WriteToEventLog(e.Data, EventLogEntryType.Error); + } + } + + private void RunnerListener_OutputDataReceived(object sender, DataReceivedEventArgs e) + { + if (!string.IsNullOrEmpty(e.Data)) + { + WriteToEventLog(e.Data, EventLogEntryType.Information); + } + } + + private Process CreateRunnerListener() + { + string exeLocation = Assembly.GetEntryAssembly().Location; + string runnerExeLocation = Path.Combine(Path.GetDirectoryName(exeLocation), "Runner.Listener.exe"); + Process newProcess = new Process(); + newProcess.StartInfo = new ProcessStartInfo(runnerExeLocation, "run --startuptype service"); + newProcess.StartInfo.CreateNoWindow = true; + newProcess.StartInfo.UseShellExecute = false; + newProcess.StartInfo.RedirectStandardInput = true; + newProcess.StartInfo.RedirectStandardOutput = true; + newProcess.StartInfo.RedirectStandardError = true; + return newProcess; + } + + protected override void OnShutdown() + { + SendCtrlSignalToRunnerListener(CTRL_BREAK_EVENT); + base.OnShutdown(); + } + + protected override void OnStop() + { + lock (ServiceLock) + { + Stopping = true; + + // throw exception during OnStop() will make SCM think the service crash and trigger recovery option. + // in this way we can self-update the service host. + if (_restart) + { + throw new Exception(Resource.CrashServiceHost); + } + + SendCtrlSignalToRunnerListener(CTRL_C_EVENT); + } + } + + // this will send either Ctrl-C or Ctrl-Break to runner.listener + // Ctrl-C will be used for OnStop() + // Ctrl-Break will be used for OnShutdown() + private void SendCtrlSignalToRunnerListener(uint signal) + { + try + { + if (RunnerListener != null && !RunnerListener.HasExited) + { + // Try to let the runner process know that we are stopping + //Attach service process to console of Runner.Listener process. This is needed, + //because windows service doesn't use its own console. + if (AttachConsole((uint)RunnerListener.Id)) + { + //Prevent main service process from stopping because of Ctrl + C event with SetConsoleCtrlHandler + SetConsoleCtrlHandler(null, true); + try + { + //Generate console event for current console with GenerateConsoleCtrlEvent (processGroupId should be zero) + GenerateConsoleCtrlEvent(signal, 0); + //Wait for the process to finish (give it up to 30 seconds) + RunnerListener.WaitForExit(30000); + } + finally + { + //Disconnect from console and restore Ctrl+C handling by main process + FreeConsole(); + SetConsoleCtrlHandler(null, false); + } + } + + // if runner is still running, kill it + if (!RunnerListener.HasExited) + { + RunnerListener.Kill(); + } + } + } + catch (Exception exception) + { + // InvalidOperationException is thrown when there is no process associated to the process object. + // There is no process to kill, Log the exception and shutdown the service. + // If we don't handle this here, the service get into a state where it can neither be stoped nor restarted (Error 1061) + WriteException(exception); + } + } + + private RunnerUpdateResult HandleRunnerUpdate() + { + // sleep 5 seconds wait for upgrade script to finish + Thread.Sleep(5000); + + // looking update result record under _diag folder (the log file itself will indicate the result) + // SelfUpdate-20160711-160300.log.succeed or SelfUpdate-20160711-160300.log.fail + // Find the latest upgrade log, make sure the log is created less than 15 seconds. 
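+ // Worked example of the naming convention (hypothetical file): for
+ // "SelfUpdate-20160711-160300.log.succeed" the index slicing below extracts the timestamp
+ // "20160711-160300", and the trailing ".succeed" extension parses into RunnerUpdateResult.Succeed.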
+ // When log file named as SelfUpdate-20160711-160300.log.succeedneedrestart, Exit(int.max), during Exit() throw Exception, this will trigger SCM to recovery the service by restart it + // since SCM cache the ServiceHost in memory, sometime we need update the servicehost as well, in this way we can upgrade the ServiceHost as well. + + DirectoryInfo dirInfo = new DirectoryInfo(GetDiagnosticFolderPath()); + FileInfo[] updateLogs = dirInfo.GetFiles("SelfUpdate-*-*.log.*") ?? new FileInfo[0]; + if (updateLogs.Length == 0) + { + // totally wrong, we are not even get a update log. + return RunnerUpdateResult.Failed; + } + else + { + FileInfo latestLogFile = null; + DateTime latestLogTimestamp = DateTime.MinValue; + foreach (var logFile in updateLogs) + { + int timestampStartIndex = logFile.Name.IndexOf("-") + 1; + int timestampEndIndex = logFile.Name.LastIndexOf(".log") - 1; + string timestamp = logFile.Name.Substring(timestampStartIndex, timestampEndIndex - timestampStartIndex + 1); + DateTime updateTime; + if (DateTime.TryParseExact(timestamp, "yyyyMMdd-HHmmss", null, DateTimeStyles.None, out updateTime) && + updateTime > latestLogTimestamp) + { + latestLogFile = logFile; + latestLogTimestamp = updateTime; + } + } + + if (latestLogFile == null || latestLogTimestamp == DateTime.MinValue) + { + // we can't find update log with expected naming convention. + return RunnerUpdateResult.Failed; + } + + latestLogFile.Refresh(); + if (DateTime.UtcNow - latestLogFile.LastWriteTimeUtc > TimeSpan.FromSeconds(15)) + { + // the latest update log we find is more than 15 sec old, the update process is busted. + return RunnerUpdateResult.Failed; + } + else + { + string resultString = Path.GetExtension(latestLogFile.Name).TrimStart('.'); + RunnerUpdateResult result; + if (Enum.TryParse(resultString, true, out result)) + { + // return the result indicated by the update log. + return result; + } + else + { + // can't convert the result string, return failed to stop the service. + return RunnerUpdateResult.Failed; + } + } + } + } + + private void WriteToEventLog(string eventText, EventLogEntryType entryType) + { + EventLog.WriteEntry(EventSourceName, eventText, entryType, 100); + } + + private string GetDiagnosticFolderPath() + { + return Path.Combine(Path.GetDirectoryName(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location)), "_diag"); + } + + private void WriteError(int exitCode) + { + String diagFolder = GetDiagnosticFolderPath(); + String eventText = String.Format( + CultureInfo.InvariantCulture, + "The Runner.Listener process failed to start successfully. It exited with code {0}. 
Check the latest Runner log files in {1} for more information.", + exitCode, + diagFolder); + + WriteToEventLog(eventText, EventLogEntryType.Error); + } + + private void WriteInfo(string message) + { + WriteToEventLog(message, EventLogEntryType.Information); + } + + private void WriteException(Exception exception) + { + WriteToEventLog(exception.ToString(), EventLogEntryType.Error); + } + + private enum RunnerUpdateResult + { + Succeed, + Failed, + SucceedNeedRestart, + } + + [DllImport("kernel32.dll", SetLastError = true)] + private static extern bool GenerateConsoleCtrlEvent(uint dwCtrlEvent, uint dwProcessGroupId); + + [DllImport("kernel32.dll", SetLastError = true)] + private static extern bool AttachConsole(uint dwProcessId); + + [DllImport("kernel32.dll", SetLastError = true, ExactSpelling = true)] + private static extern bool FreeConsole(); + + [DllImport("kernel32.dll", SetLastError = true)] + private static extern bool SetConsoleCtrlHandler(ConsoleCtrlDelegate HandlerRoutine, bool Add); + + // Delegate type to be used as the Handler Routine for SetConsoleCtrlHandler + delegate Boolean ConsoleCtrlDelegate(uint CtrlType); + } +} diff --git a/src/Runner.Service/Windows/RunnerService.csproj b/src/Runner.Service/Windows/RunnerService.csproj new file mode 100644 index 00000000000..8871f9a8004 --- /dev/null +++ b/src/Runner.Service/Windows/RunnerService.csproj @@ -0,0 +1,83 @@ + + + + + Debug + AnyCPU + {D12EBD71-0464-46D0-8394-40BCFBA0A6F2} + WinExe + Properties + RunnerService + RunnerService + true + FinalPublicKey.snk + true + v4.5 + 512 + true + + + AnyCPU + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + false + + + AnyCPU + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + + + + + + + + + + + + Component + + + RunnerService.cs + + + + + True + True + Resource.resx + + + + + + + + + ResXFileCodeGenerator + Resource.Designer.cs + + + + + diff --git a/src/Runner.Worker/ActionCommandManager.cs b/src/Runner.Worker/ActionCommandManager.cs new file mode 100644 index 00000000000..a2819fdccb7 --- /dev/null +++ b/src/Runner.Worker/ActionCommandManager.cs @@ -0,0 +1,527 @@ +using GitHub.DistributedTask.Pipelines; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(ActionCommandManager))] + public interface IActionCommandManager : IRunnerService + { + void EnablePluginInternalCommand(); + void DisablePluginInternalCommand(); + bool TryProcessCommand(IExecutionContext context, string input); + } + + public sealed class ActionCommandManager : RunnerService, IActionCommandManager + { + private const string _stopCommand = "stop-commands"; + private readonly Dictionary _commandExtensions = new Dictionary(StringComparer.OrdinalIgnoreCase); + private HashSet _registeredCommands = new HashSet(StringComparer.OrdinalIgnoreCase); + private readonly object _commandSerializeLock = new object(); + private bool _stopProcessCommand = false; + private string _stopToken = null; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + + _registeredCommands.Add(_stopCommand); + + // Register all command extensions + var extensionManager = hostContext.GetService(); + foreach (var commandExt in extensionManager.GetExtensions() ?? 
new List()) + { + Trace.Info($"Register action command extension for command {commandExt.Command}"); + _commandExtensions[commandExt.Command] = commandExt; + if (commandExt.Command != "internal-set-repo-path") + { + _registeredCommands.Add(commandExt.Command); + } + } + } + + public void EnablePluginInternalCommand() + { + Trace.Info($"Enable plugin internal command extension."); + _registeredCommands.Add("internal-set-repo-path"); + } + + public void DisablePluginInternalCommand() + { + Trace.Info($"Disable plugin internal command extension."); + _registeredCommands.Remove("internal-set-repo-path"); + } + + public bool TryProcessCommand(IExecutionContext context, string input) + { + if (string.IsNullOrEmpty(input)) + { + return false; + } + + // TryParse input to Command + ActionCommand actionCommand; + if (!ActionCommand.TryParseV2(input, _registeredCommands, out actionCommand) && + !ActionCommand.TryParse(input, _registeredCommands, out actionCommand)) + { + return false; + } + + // process action command in serialize oreder. + lock (_commandSerializeLock) + { + if (_stopProcessCommand) + { + if (!string.IsNullOrEmpty(_stopToken) && + string.Equals(actionCommand.Command, _stopToken, StringComparison.OrdinalIgnoreCase)) + { + context.Output(input); + context.Debug("Resume processing commands"); + _registeredCommands.Remove(_stopToken); + _stopProcessCommand = false; + _stopToken = null; + return true; + } + else + { + context.Debug($"Process commands has been stopped and waiting for '##[{_stopToken}]' to resume."); + return false; + } + } + else + { + if (string.Equals(actionCommand.Command, _stopCommand, StringComparison.OrdinalIgnoreCase)) + { + context.Output(input); + context.Debug("Paused processing commands until '##[{actionCommand.Data}]' is received"); + _stopToken = actionCommand.Data; + _stopProcessCommand = true; + _registeredCommands.Add(_stopToken); + return true; + } + else if (_commandExtensions.TryGetValue(actionCommand.Command, out IActionCommandExtension extension)) + { + bool omitEcho; + try + { + extension.ProcessCommand(context, input, actionCommand, out omitEcho); + } + catch (Exception ex) + { + omitEcho = true; + context.Output(input); + context.Error($"Unable to process command '{input}' successfully."); + context.Error(ex); + context.CommandResult = TaskResult.Failed; + } + + if (!omitEcho) + { + context.Output(input); + context.Debug($"Processed command"); + } + + } + else + { + context.Warning($"Can't find command extension for ##[{actionCommand.Command}.command]."); + } + } + } + + return true; + } + } + + public interface IActionCommandExtension : IExtension + { + string Command { get; } + + void ProcessCommand(IExecutionContext context, string line, ActionCommand command, out bool omitEcho); + } + + public sealed class InternalPluginSetRepoPathCommandExtension : RunnerService, IActionCommandExtension + { + public string Command => "internal-set-repo-path"; + + public Type ExtensionType => typeof(IActionCommandExtension); + + public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, out bool omitEcho) + { + if (!command.Properties.TryGetValue(SetRepoPathCommandProperties.repoFullName, out string repoFullName) || string.IsNullOrEmpty(repoFullName)) + { + throw new Exception("Required field 'repoFullName' is missing in ##[internal-set-repo-path] command."); + } + + if (!command.Properties.TryGetValue(SetRepoPathCommandProperties.workspaceRepo, out string workspaceRepo) || string.IsNullOrEmpty(workspaceRepo)) + { + throw new 
Exception("Required field 'workspaceRepo' is missing in ##[internal-set-repo-path] command."); + } + + var directoryManager = HostContext.GetService(); + var trackingConfig = directoryManager.UpdateRepositoryDirectory(context, repoFullName, command.Data, StringUtil.ConvertToBoolean(workspaceRepo)); + + omitEcho = true; + } + + private static class SetRepoPathCommandProperties + { + public const String repoFullName = "repoFullName"; + public const String workspaceRepo = "workspaceRepo"; + } + } + + public sealed class SetEnvCommandExtension : RunnerService, IActionCommandExtension + { + public string Command => "set-env"; + + public Type ExtensionType => typeof(IActionCommandExtension); + + public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, out bool omitEcho) + { + if (!command.Properties.TryGetValue(SetEnvCommandProperties.Name, out string envName) || string.IsNullOrEmpty(envName)) + { + throw new Exception("Required field 'name' is missing in ##[set-env] command."); + } + + context.EnvironmentVariables[envName] = command.Data; + context.SetEnvContext(envName, command.Data); + context.Output(line); + context.Debug($"{envName}='{command.Data}'"); + omitEcho = true; + } + + private static class SetEnvCommandProperties + { + public const String Name = "name"; + } + } + + public sealed class SetOutputCommandExtension : RunnerService, IActionCommandExtension + { + public string Command => "set-output"; + + public Type ExtensionType => typeof(IActionCommandExtension); + + public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, out bool omitEcho) + { + if (!command.Properties.TryGetValue(SetOutputCommandProperties.Name, out string outputName) || string.IsNullOrEmpty(outputName)) + { + throw new Exception("Required field 'name' is missing in ##[set-output] command."); + } + + context.SetOutput(outputName, command.Data, out var reference); + context.Output(line); + context.Debug($"{reference}='{command.Data}'"); + omitEcho = true; + } + + private static class SetOutputCommandProperties + { + public const String Name = "name"; + } + } + + public sealed class SaveStateCommandExtension : RunnerService, IActionCommandExtension + { + public string Command => "save-state"; + + public Type ExtensionType => typeof(IActionCommandExtension); + + public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, out bool omitEcho) + { + if (!command.Properties.TryGetValue(SaveStateCommandProperties.Name, out string stateName) || string.IsNullOrEmpty(stateName)) + { + throw new Exception("Required field 'name' is missing in ##[save-state] command."); + } + + context.IntraActionState[stateName] = command.Data; + context.Debug($"Save intra-action state {stateName} = {command.Data}"); + omitEcho = true; + } + + private static class SaveStateCommandProperties + { + public const String Name = "name"; + } + } + + public sealed class AddMaskCommandExtension : RunnerService, IActionCommandExtension + { + public string Command => "add-mask"; + + public Type ExtensionType => typeof(IActionCommandExtension); + + public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, out bool omitEcho) + { + if (string.IsNullOrWhiteSpace(command.Data)) + { + context.Warning("Can't add secret mask for empty string."); + } + else + { + HostContext.SecretMasker.AddValue(command.Data); + Trace.Info($"Add new secret mask with length of {command.Data.Length}"); + } + + omitEcho = true; + } + } + + public sealed 
class AddPathCommandExtension : RunnerService, IActionCommandExtension + { + public string Command => "add-path"; + + public Type ExtensionType => typeof(IActionCommandExtension); + + public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, out bool omitEcho) + { + ArgUtil.NotNullOrEmpty(command.Data, "path"); + context.PrependPath.RemoveAll(x => string.Equals(x, command.Data, StringComparison.CurrentCulture)); + context.PrependPath.Add(command.Data); + omitEcho = false; + } + } + + public sealed class AddMatcherCommandExtension : RunnerService, IActionCommandExtension + { + public string Command => "add-matcher"; + + public Type ExtensionType => typeof(IActionCommandExtension); + + public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, out bool omitEcho) + { + omitEcho = false; + var file = command.Data; + + // File is required + if (string.IsNullOrEmpty(file)) + { + context.Warning("File path must be specified."); + return; + } + + // Translate file path back from container path + if (context.Container != null) + { + file = context.Container.TranslateToHostPath(file); + } + + // Root the path + if (!Path.IsPathRooted(file)) + { + var githubContext = context.ExpressionValues["github"] as GitHubContext; + ArgUtil.NotNull(githubContext, nameof(githubContext)); + var workspace = githubContext["workspace"].ToString(); + ArgUtil.NotNullOrEmpty(workspace, "workspace"); + + file = Path.Combine(workspace, file); + } + + // Load the config + var config = IOUtil.LoadObject(file); + + // Add + if (config?.Matchers?.Count > 0) + { + config.Validate(); + context.AddMatchers(config); + } + } + } + + public sealed class RemoveMatcherCommandExtension : RunnerService, IActionCommandExtension + { + public string Command => "remove-matcher"; + + public Type ExtensionType => typeof(IActionCommandExtension); + + public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, out bool omitEcho) + { + omitEcho = false; + command.Properties.TryGetValue(RemoveMatcherCommandProperties.Owner, out string owner); + var file = command.Data; + + // Owner and file are mutually exclusive + if (!string.IsNullOrEmpty(owner) && !string.IsNullOrEmpty(file)) + { + context.Warning("Either specify a matcher owner name or a file path. 
Both values cannot be set."); + return; + } + + // Owner or file is required + if (string.IsNullOrEmpty(owner) && string.IsNullOrEmpty(file)) + { + context.Warning("Either a matcher owner name or a file path must be specified."); + return; + } + + // Remove by owner + if (!string.IsNullOrEmpty(owner)) + { + context.RemoveMatchers(new[] { owner }); + } + // Remove by file + else + { + // Translate file path back from container path + if (context.Container != null) + { + file = context.Container.TranslateToHostPath(file); + } + + // Root the path + if (!Path.IsPathRooted(file)) + { + var githubContext = context.ExpressionValues["github"] as GitHubContext; + ArgUtil.NotNull(githubContext, nameof(githubContext)); + var workspace = githubContext["workspace"].ToString(); + ArgUtil.NotNullOrEmpty(workspace, "workspace"); + + file = Path.Combine(workspace, file); + } + + // Load the config + var config = IOUtil.LoadObject(file); + + if (config?.Matchers?.Count > 0) + { + // Remove + context.RemoveMatchers(config.Matchers.Select(x => x.Owner)); + } + } + } + + private static class RemoveMatcherCommandProperties + { + public const string Owner = "owner"; + } + } + + public sealed class DebugCommandExtension : RunnerService, IActionCommandExtension + { + public string Command => "debug"; + + public Type ExtensionType => typeof(IActionCommandExtension); + + public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command, out bool omitEcho) + { + omitEcho = true; + context.Debug(command.Data); + } + } + + public sealed class WarningCommandExtension : IssueCommandExtension + { + public override IssueType Type => IssueType.Warning; + + public override string Command => "warning"; + } + + public sealed class ErrorCommandExtension : IssueCommandExtension + { + public override IssueType Type => IssueType.Error; + + public override string Command => "error"; + } + + public abstract class IssueCommandExtension : RunnerService, IActionCommandExtension + { + public abstract IssueType Type { get; } + public abstract string Command { get; } + + public Type ExtensionType => typeof(IActionCommandExtension); + + public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command, out bool omitEcho) + { + omitEcho = true; + command.Properties.TryGetValue(IssueCommandProperties.File, out string file); + command.Properties.TryGetValue(IssueCommandProperties.Line, out string line); + command.Properties.TryGetValue(IssueCommandProperties.Column, out string column); + + Issue issue = new Issue() + { + Category = "General", + Type = this.Type, + Message = command.Data + }; + + if (!string.IsNullOrEmpty(file)) + { + issue.Category = "Code"; + + if (context.Container != null) + { + // Translate file path back from container path + file = context.Container.TranslateToHostPath(file); + command.Properties[IssueCommandProperties.File] = file; + } + + // Get the values that represent the server path given a local path + string repoName = context.GetGitHubContext("repository"); + var repoPath = context.GetGitHubContext("workspace"); + + string relativeSourcePath = IOUtil.MakeRelative(file, repoPath); + if (!string.Equals(relativeSourcePath, file, IOUtil.FilePathStringComparison)) + { + // add repo info + if (!string.IsNullOrEmpty(repoName)) + { + command.Properties["repo"] = repoName; + } + + if (!string.IsNullOrEmpty(relativeSourcePath)) + { + // replace sourcePath with the new relative path + // prefer `/` on all platforms + command.Properties[IssueCommandProperties.File] = 
relativeSourcePath.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + } + } + } + + foreach (var property in command.Properties) + { + issue.Data[property.Key] = property.Value; + } + + context.AddIssue(issue); + } + + private static class IssueCommandProperties + { + public const String File = "file"; + public const String Line = "line"; + public const String Column = "col"; + } + + } + + public sealed class GroupCommandExtension : GroupingCommandExtension + { + public override string Command => "group"; + } + + public sealed class EndGroupCommandExtension : GroupingCommandExtension + { + public override string Command => "endgroup"; + } + + public abstract class GroupingCommandExtension : RunnerService, IActionCommandExtension + { + public abstract string Command { get; } + public Type ExtensionType => typeof(IActionCommandExtension); + + public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, out bool omitEcho) + { + var data = this is GroupCommandExtension ? command.Data : string.Empty; + context.Output($"##[{Command}]{data}"); + omitEcho = true; + } + } +} diff --git a/src/Runner.Worker/ActionManager.cs b/src/Runner.Worker/ActionManager.cs new file mode 100644 index 00000000000..e92aaf4f50c --- /dev/null +++ b/src/Runner.Worker/ActionManager.cs @@ -0,0 +1,847 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using GitHub.Runner.Worker.Container; +using GitHub.Services.Common; +using Newtonsoft.Json; +using Pipelines = GitHub.DistributedTask.Pipelines; +using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants; + +namespace GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(ActionManager))] + public interface IActionManager : IRunnerService + { + Dictionary CachedActionContainers { get; } + Task> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable steps); + Definition LoadAction(IExecutionContext executionContext, Pipelines.ActionStep action); + } + + public sealed class ActionManager : RunnerService, IActionManager + { + private const int _defaultFileStreamBufferSize = 4096; + + //81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k). + private const int _defaultCopyBufferSize = 81920; + + private readonly Dictionary _cachedActionContainers = new Dictionary(); + + public Dictionary CachedActionContainers => _cachedActionContainers; + public async Task> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable steps) + { + ArgUtil.NotNull(executionContext, nameof(executionContext)); + ArgUtil.NotNull(steps, nameof(steps)); + + executionContext.Output("Prepare all required actions"); + Dictionary> imagesToPull = new Dictionary>(StringComparer.OrdinalIgnoreCase); + Dictionary> imagesToBuild = new Dictionary>(StringComparer.OrdinalIgnoreCase); + Dictionary imagesToBuildInfo = new Dictionary(StringComparer.OrdinalIgnoreCase); + List containerSetupSteps = new List(); + IEnumerable actions = steps.OfType(); + + // TODO: Depreciate the PREVIEW_ACTION_TOKEN + // Log even if we aren't using it to ensure users know. 
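// Illustrative only: sample command lines a step could print to stdout to drive the
// extensions defined above. The exact property syntax of "##[...]" lines is parsed by
// ActionCommand in Runner.Common (not shown in this hunk), so treat these literals as a
// sketch of the shape, not a normative format.
Console.WriteLine("##[set-output name=version]1.2.3");                // SetOutputCommandExtension
Console.WriteLine("##[add-mask]s3cr3t-value");                        // AddMaskCommandExtension
Console.WriteLine("##[error file=app.cs,line=10,col=5]Build failed"); // IssueCommandExtension (error)
Console.WriteLine("##[group]Restore packages");                       // GroupCommandExtension
Console.WriteLine("##[endgroup]");                                    // EndGroupCommandExtension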
+ if (!string.IsNullOrEmpty(executionContext.Variables.Get("PREVIEW_ACTION_TOKEN"))) + { + executionContext.Warning("The 'PREVIEW_ACTION_TOKEN' secret is depreciated. Please remove it from the repository's secrets"); + } + + foreach (var action in actions) + { + if (action.Reference.Type == Pipelines.ActionSourceType.ContainerRegistry) + { + ArgUtil.NotNull(action, nameof(action)); + var containerReference = action.Reference as Pipelines.ContainerRegistryReference; + ArgUtil.NotNull(containerReference, nameof(containerReference)); + ArgUtil.NotNullOrEmpty(containerReference.Image, nameof(containerReference.Image)); + + if (!imagesToPull.ContainsKey(containerReference.Image)) + { + imagesToPull[containerReference.Image] = new List(); + } + + Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'"); + imagesToPull[containerReference.Image].Add(action.Id); + } + else if (action.Reference.Type == Pipelines.ActionSourceType.Repository) + { + // only download the repository archive + await DownloadRepositoryActionAsync(executionContext, action); + + // more preparation base on content in the repository (action.yml) + var setupInfo = PrepareRepositoryActionAsync(executionContext, action); + if (setupInfo != null) + { + if (!string.IsNullOrEmpty(setupInfo.Image)) + { + if (!imagesToPull.ContainsKey(setupInfo.Image)) + { + imagesToPull[setupInfo.Image] = new List(); + } + + Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to pull image '{setupInfo.Image}'"); + imagesToPull[setupInfo.Image].Add(action.Id); + } + else + { + ArgUtil.NotNullOrEmpty(setupInfo.ActionRepository, nameof(setupInfo.ActionRepository)); + + if (!imagesToBuild.ContainsKey(setupInfo.ActionRepository)) + { + imagesToBuild[setupInfo.ActionRepository] = new List(); + } + + Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to build image '{setupInfo.Dockerfile}'"); + imagesToBuild[setupInfo.ActionRepository].Add(action.Id); + imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo; + } + } + } + } + + if (imagesToPull.Count > 0) + { + foreach (var imageToPull in imagesToPull) + { + Trace.Info($"{imageToPull.Value.Count} steps need to pull image '{imageToPull.Key}'"); + containerSetupSteps.Add(new JobExtensionRunner(runAsync: this.PullActionContainerAsync, + condition: $"{PipelineTemplateConstants.Success}()", + displayName: $"Pull {imageToPull.Key}", + data: new ContainerSetupInfo(imageToPull.Value, imageToPull.Key))); + } + } + + if (imagesToBuild.Count > 0) + { + foreach (var imageToBuild in imagesToBuild) + { + var setupInfo = imagesToBuildInfo[imageToBuild.Key]; + Trace.Info($"{imageToBuild.Value.Count} steps need to build image from '{setupInfo.Dockerfile}'"); + containerSetupSteps.Add(new JobExtensionRunner(runAsync: this.BuildActionContainerAsync, + condition: $"{PipelineTemplateConstants.Success}()", + displayName: $"Build {setupInfo.ActionRepository}", + data: new ContainerSetupInfo(imageToBuild.Value, setupInfo.Dockerfile, setupInfo.WorkingDirectory))); + } + } + +#if !OS_LINUX + if (containerSetupSteps.Count > 0) + { + executionContext.Output("Container action is only supported on Linux, skip pull and build docker images."); + containerSetupSteps.Clear(); + } +#endif + + return containerSetupSteps; + } + + public Definition LoadAction(IExecutionContext executionContext, Pipelines.ActionStep action) + { + // Validate args. 
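// Illustrative sketch of the image de-duplication performed above: steps that reference the
// same container image share a single "Pull <image>" setup step, and the ContainerSetupInfo
// carries every step id that depends on that image. All names below are hypothetical.
var imagesToPull = new Dictionary<string, List<Guid>>(StringComparer.OrdinalIgnoreCase);
void AddPull(string image, Guid stepId)
{
    if (!imagesToPull.ContainsKey(image))
    {
        imagesToPull[image] = new List<Guid>();
    }
    imagesToPull[image].Add(stepId);
}
AddPull("alpine:3.10", Guid.NewGuid());
AddPull("alpine:3.10", Guid.NewGuid()); // same image again -> still one pull step, now with two step ids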
+ Trace.Entering(); + ArgUtil.NotNull(action, nameof(action)); + + // Initialize the definition wrapper object. + var definition = new Definition() + { + Data = new ActionDefinitionData() + }; + + if (action.Reference.Type == Pipelines.ActionSourceType.ContainerRegistry) + { + Trace.Info("Load action that reference container from registry."); + CachedActionContainers.TryGetValue(action.Id, out var container); + ArgUtil.NotNull(container, nameof(container)); + definition.Data.Execution = new ContainerActionExecutionData() + { + Image = container.ContainerImage + }; + + Trace.Info($"Using action container image: {container.ContainerImage}."); + } + else if (action.Reference.Type == Pipelines.ActionSourceType.Repository) + { + string actionDirectory = null; + var repoAction = action.Reference as Pipelines.RepositoryPathReference; + if (string.Equals(repoAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase)) + { + actionDirectory = executionContext.GetGitHubContext("workspace"); + if (!string.IsNullOrEmpty(repoAction.Path)) + { + actionDirectory = Path.Combine(actionDirectory, repoAction.Path); + } + } + else + { + actionDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repoAction.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repoAction.Ref); + if (!string.IsNullOrEmpty(repoAction.Path)) + { + actionDirectory = Path.Combine(actionDirectory, repoAction.Path); + } + } + + Trace.Info($"Load action that reference repository from '{actionDirectory}'"); + definition.Directory = actionDirectory; + + string manifestFile = Path.Combine(actionDirectory, "action.yml"); + string dockerFile = Path.Combine(actionDirectory, "Dockerfile"); + string dockerFileLowerCase = Path.Combine(actionDirectory, "dockerfile"); + if (File.Exists(manifestFile)) + { + var manifestManager = HostContext.GetService(); + definition.Data = manifestManager.Load(executionContext, manifestFile); + + Trace.Verbose($"Action friendly name: '{definition.Data.Name}'"); + Trace.Verbose($"Action description: '{definition.Data.Description}'"); + + if (definition.Data.Inputs != null) + { + foreach (var input in definition.Data.Inputs) + { + Trace.Verbose($"Action input: '{input.Key.ToString()}' default to '{input.Value.ToString()}'"); + } + } + + if (definition.Data.Execution.ExecutionType == ActionExecutionType.Container) + { + var containerAction = definition.Data.Execution as ContainerActionExecutionData; + Trace.Info($"Action container Dockerfile/image: {containerAction.Image}."); + + if (containerAction.Arguments != null) + { + Trace.Info($"Action container args: {StringUtil.ConvertToJson(containerAction.Arguments)}."); + } + + if (containerAction.Environment != null) + { + Trace.Info($"Action container env: {StringUtil.ConvertToJson(containerAction.Environment)}."); + } + + if (!string.IsNullOrEmpty(containerAction.EntryPoint)) + { + Trace.Info($"Action container entrypoint: {containerAction.EntryPoint}."); + } + + if (!string.IsNullOrEmpty(containerAction.Cleanup)) + { + Trace.Info($"Action container cleanup entrypoint: {containerAction.Cleanup}."); + } + + if (CachedActionContainers.TryGetValue(action.Id, out var container)) + { + Trace.Info($"Image '{containerAction.Image}' already built/pulled, use image: {container.ContainerImage}."); + containerAction.Image = container.ContainerImage; + } + } + else if (definition.Data.Execution.ExecutionType == ActionExecutionType.NodeJS) + { + var nodeAction = definition.Data.Execution as 
NodeJSActionExecutionData; + Trace.Info($"Action node.js file: {nodeAction.Script}."); + Trace.Info($"Action cleanup node.js file: {nodeAction.Cleanup ?? "N/A"}."); + } + else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Plugin) + { + var pluginAction = definition.Data.Execution as PluginActionExecutionData; + var pluginManager = HostContext.GetService(); + var plugin = pluginManager.GetPluginAction(pluginAction.Plugin); + + ArgUtil.NotNull(plugin, pluginAction.Plugin); + ArgUtil.NotNullOrEmpty(plugin.PluginTypeName, pluginAction.Plugin); + + pluginAction.Plugin = plugin.PluginTypeName; + Trace.Info($"Action plugin: {plugin.PluginTypeName}."); + + if (!string.IsNullOrEmpty(plugin.PostPluginTypeName)) + { + pluginAction.Cleanup = plugin.PostPluginTypeName; + Trace.Info($"Action cleanup plugin: {plugin.PluginTypeName}."); + } + } + else + { + throw new NotSupportedException(definition.Data.Execution.ExecutionType.ToString()); + } + } + else if (File.Exists(dockerFile)) + { + if (CachedActionContainers.TryGetValue(action.Id, out var container)) + { + definition.Data.Execution = new ContainerActionExecutionData() + { + Image = container.ContainerImage + }; + } + else + { + definition.Data.Execution = new ContainerActionExecutionData() + { + Image = dockerFile + }; + } + } + else if (File.Exists(dockerFileLowerCase)) + { + if (CachedActionContainers.TryGetValue(action.Id, out var container)) + { + definition.Data.Execution = new ContainerActionExecutionData() + { + Image = container.ContainerImage + }; + } + else + { + definition.Data.Execution = new ContainerActionExecutionData() + { + Image = dockerFileLowerCase + }; + } + } + else + { + var fullPath = IOUtil.ResolvePath(actionDirectory, "."); // resolve full path without access filesystem. + throw new NotSupportedException($"Can't find 'action.yml' or 'Dockerfile' under '{fullPath}'. 
Did you forget to run actions/checkout before running your local action?"); + } + } + else if (action.Reference.Type == Pipelines.ActionSourceType.Script) + { + definition.Data.Execution = new ScriptActionExecutionData(); + definition.Data.Name = "Run"; + definition.Data.Description = "Execute a script"; + } + else + { + throw new NotSupportedException(action.Reference.Type.ToString()); + } + + return definition; + } + + private async Task PullActionContainerAsync(IExecutionContext executionContext, object data) + { + var setupInfo = data as ContainerSetupInfo; + ArgUtil.NotNull(setupInfo, nameof(setupInfo)); + ArgUtil.NotNullOrEmpty(setupInfo.Container.Image, nameof(setupInfo.Container.Image)); + + executionContext.Output($"Pull down action image '{setupInfo.Container.Image}'"); + + // Pull down docker image with retry up to 3 times + var dockerManger = HostContext.GetService(); + int retryCount = 0; + int pullExitCode = 0; + while (retryCount < 3) + { + pullExitCode = await dockerManger.DockerPull(executionContext, setupInfo.Container.Image); + if (pullExitCode == 0) + { + break; + } + else + { + retryCount++; + if (retryCount < 3) + { + var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10)); + executionContext.Warning($"Docker pull failed with exit code {pullExitCode}, back off {backOff.TotalSeconds} seconds before retry."); + await Task.Delay(backOff); + } + } + } + + if (retryCount == 3 && pullExitCode != 0) + { + throw new InvalidOperationException($"Docker pull failed with exit code {pullExitCode}"); + } + + foreach (var stepId in setupInfo.StepIds) + { + CachedActionContainers[stepId] = new ContainerInfo() { ContainerImage = setupInfo.Container.Image }; + Trace.Info($"Prepared docker image '{setupInfo.Container.Image}' for action {stepId} ({setupInfo.Container.Image})"); + } + } + + private async Task BuildActionContainerAsync(IExecutionContext executionContext, object data) + { + var setupInfo = data as ContainerSetupInfo; + ArgUtil.NotNull(setupInfo, nameof(setupInfo)); + ArgUtil.NotNullOrEmpty(setupInfo.Container.Dockerfile, nameof(setupInfo.Container.Dockerfile)); + + executionContext.Output($"Build container for action use: '{setupInfo.Container.Dockerfile}'."); + + // Build docker image with retry up to 3 times + var dockerManger = HostContext.GetService(); + int retryCount = 0; + int buildExitCode = 0; + var imageName = $"{dockerManger.DockerInstanceLabel}:{Guid.NewGuid().ToString("N")}"; + while (retryCount < 3) + { + buildExitCode = await dockerManger.DockerBuild(executionContext, setupInfo.Container.WorkingDirectory, Directory.GetParent(setupInfo.Container.Dockerfile).FullName, imageName); + if (buildExitCode == 0) + { + break; + } + else + { + retryCount++; + if (retryCount < 3) + { + var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10)); + executionContext.Warning($"Docker build failed with exit code {buildExitCode}, back off {backOff.TotalSeconds} seconds before retry."); + await Task.Delay(backOff); + } + } + } + + if (retryCount == 3 && buildExitCode != 0) + { + throw new InvalidOperationException($"Docker build failed with exit code {buildExitCode}"); + } + + foreach (var stepId in setupInfo.StepIds) + { + CachedActionContainers[stepId] = new ContainerInfo() { ContainerImage = imageName }; + Trace.Info($"Prepared docker image '{imageName}' for action {stepId} ({setupInfo.Container.Dockerfile})"); + } + } + + private async Task DownloadRepositoryActionAsync(IExecutionContext 
executionContext, Pipelines.ActionStep repositoryAction) + { + Trace.Entering(); + ArgUtil.NotNull(executionContext, nameof(executionContext)); + + var repositoryReference = repositoryAction.Reference as Pipelines.RepositoryPathReference; + ArgUtil.NotNull(repositoryReference, nameof(repositoryReference)); + + if (string.Equals(repositoryReference.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase)) + { + Trace.Info($"Repository action is in 'self' repository."); + return; + } + + if (!string.Equals(repositoryReference.RepositoryType, Pipelines.RepositoryTypes.GitHub, StringComparison.OrdinalIgnoreCase)) + { + throw new NotSupportedException(repositoryReference.RepositoryType); + } + + ArgUtil.NotNullOrEmpty(repositoryReference.Name, nameof(repositoryReference.Name)); + ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref)); + + string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repositoryReference.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repositoryReference.Ref); + if (File.Exists(destDirectory + ".completed")) + { + executionContext.Debug($"Action '{repositoryReference.Name}@{repositoryReference.Ref}' already downloaded at '{destDirectory}'."); + return; + } + else + { + // make sure we get an clean folder ready to use. + IOUtil.DeleteDirectory(destDirectory, executionContext.CancellationToken); + Directory.CreateDirectory(destDirectory); + executionContext.Output($"Download action repository '{repositoryReference.Name}@{repositoryReference.Ref}'"); + } + +#if OS_WINDOWS + string archiveLink = $"https://api.github.com/repos/{repositoryReference.Name}/zipball/{repositoryReference.Ref}"; +#else + string archiveLink = $"https://api.github.com/repos/{repositoryReference.Name}/tarball/{repositoryReference.Ref}"; +#endif + Trace.Info($"Download archive '{archiveLink}' to '{destDirectory}'."); + + //download and extract action in a temp folder and rename it on success + string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid()); + Directory.CreateDirectory(tempDirectory); + + +#if OS_WINDOWS + string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.zip"); +#else + string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.tar.gz"); +#endif + Trace.Info($"Save archive '{archiveLink}' into {archiveFile}."); + try + { + + int retryCount = 0; + + // Allow up to 20 * 60s for any action to be downloaded from github graph. 
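// Illustrative sketch of the download pattern used below: each attempt gets its own timeout
// token, linked to the job's cancellation token, so the transfer aborts when either fires.
// `jobToken`, `source` and `destination` are hypothetical placeholders for this sketch.
using (var attemptTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(20 * 60)))
using (var linked = CancellationTokenSource.CreateLinkedTokenSource(attemptTimeout.Token, jobToken))
{
    await source.CopyToAsync(destination, 81920, linked.Token);
    await destination.FlushAsync(linked.Token);
}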
+ int timeoutSeconds = 20 * 60; + while (retryCount < 3) + { + using (var actionDownloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds))) + using (var actionDownloadCancellation = CancellationTokenSource.CreateLinkedTokenSource(actionDownloadTimeout.Token, executionContext.CancellationToken)) + { + try + { + //open zip stream in async mode + using (FileStream fs = new FileStream(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true)) + using (var httpClientHandler = HostContext.CreateHttpClientHandler()) + using (var httpClient = new HttpClient(httpClientHandler)) + { + var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN"); + if (string.IsNullOrEmpty(authToken)) + { + // TODO: Depreciate the PREVIEW_ACTION_TOKEN + authToken = executionContext.Variables.Get("PREVIEW_ACTION_TOKEN"); + } + + if (!string.IsNullOrEmpty(authToken)) + { + HostContext.SecretMasker.AddValue(authToken); + var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}")); + httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken); + } + else + { + var accessToken = executionContext.GetGitHubContext("token"); + var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}")); + httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken); + } + + httpClient.DefaultRequestHeaders.UserAgent.Add(HostContext.UserAgent); + using (var result = await httpClient.GetStreamAsync(archiveLink)) + { + await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token); + await fs.FlushAsync(actionDownloadCancellation.Token); + + // download succeed, break out the retry loop. + break; + } + } + } + catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested) + { + Trace.Info($"Action download has been cancelled."); + throw; + } + catch (Exception ex) when (retryCount < 2) + { + retryCount++; + Trace.Error($"Fail to download archive '{archiveLink}' -- Attempt: {retryCount}"); + Trace.Error(ex); + if (actionDownloadTimeout.Token.IsCancellationRequested) + { + // action download didn't finish within timeout + executionContext.Warning($"Action '{archiveLink}' didn't finish download within {timeoutSeconds} seconds."); + } + else + { + executionContext.Warning($"Failed to download action '{archiveLink}'. 
Error {ex.Message}"); + } + } + } + + if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF"))) + { + var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30)); + executionContext.Warning($"Back off {backOff.TotalSeconds} seconds before retry."); + await Task.Delay(backOff); + } + } + + ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile)); + executionContext.Debug($"Download '{archiveLink}' to '{archiveFile}'"); + + var stagingDirectory = Path.Combine(tempDirectory, "_staging"); + Directory.CreateDirectory(stagingDirectory); + +#if OS_WINDOWS + ZipFile.ExtractToDirectory(archiveFile, stagingDirectory); +#else + string tar = WhichUtil.Which("tar", require: true, trace: Trace); + + // tar -xzf + using (var processInvoker = HostContext.CreateService()) + { + processInvoker.OutputDataReceived += new EventHandler((sender, args) => + { + if (!string.IsNullOrEmpty(args.Data)) + { + Trace.Info(args.Data); + } + }); + + processInvoker.ErrorDataReceived += new EventHandler((sender, args) => + { + if (!string.IsNullOrEmpty(args.Data)) + { + Trace.Error(args.Data); + } + }); + + int exitCode = await processInvoker.ExecuteAsync(stagingDirectory, tar, $"-xzf \"{archiveFile}\"", null, executionContext.CancellationToken); + if (exitCode != 0) + { + throw new NotSupportedException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. return code: {exitCode}."); + } + } +#endif + + // repository archive from github always contains a nested folder + var subDirectories = new DirectoryInfo(stagingDirectory).GetDirectories(); + if (subDirectories.Length != 1) + { + throw new InvalidOperationException($"'{archiveFile}' contains '{subDirectories.Length}' directories"); + } + else + { + executionContext.Debug($"Unwrap '{subDirectories[0].Name}' to '{destDirectory}'"); + IOUtil.CopyDirectory(subDirectories[0].FullName, destDirectory, executionContext.CancellationToken); + } + + Trace.Verbose("Create watermark file indicate action download succeed."); + File.WriteAllText(destDirectory + ".completed", DateTime.UtcNow.ToString()); + + executionContext.Debug($"Archive '{archiveFile}' has been unzipped into '{destDirectory}'."); + Trace.Info("Finished getting action repository."); + } + finally + { + try + { + //if the temp folder wasn't moved -> wipe it + if (Directory.Exists(tempDirectory)) + { + Trace.Verbose("Deleting action temp folder: {0}", tempDirectory); + IOUtil.DeleteDirectory(tempDirectory, CancellationToken.None); // Don't cancel this cleanup and should be pretty fast. + } + } + catch (Exception ex) + { + //it is not critical if we fail to delete the temp folder + Trace.Warning("Failed to delete temp folder '{0}'. 
Exception: {1}", tempDirectory, ex); + } + } + } + + private ActionContainer PrepareRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction) + { + var repositoryReference = repositoryAction.Reference as Pipelines.RepositoryPathReference; + if (string.Equals(repositoryReference.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase)) + { + Trace.Info($"Repository action is in 'self' repository."); + return null; + } + + var setupInfo = new ActionContainer(); + string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repositoryReference.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repositoryReference.Ref); + string actionEntryDirectory = destDirectory; + string dockerFileRelativePath = repositoryReference.Name; + ArgUtil.NotNull(repositoryReference, nameof(repositoryReference)); + if (!string.IsNullOrEmpty(repositoryReference.Path)) + { + actionEntryDirectory = Path.Combine(destDirectory, repositoryReference.Path); + dockerFileRelativePath = $"{dockerFileRelativePath}/{repositoryReference.Path}"; + setupInfo.ActionRepository = $"{repositoryReference.Name}/{repositoryReference.Path}@{repositoryReference.Ref}"; + } + else + { + setupInfo.ActionRepository = $"{repositoryReference.Name}@{repositoryReference.Ref}"; + } + + // find the docker file or action.yml file + var dockerFile = Path.Combine(actionEntryDirectory, "Dockerfile"); + var dockerFileLowerCase = Path.Combine(actionEntryDirectory, "dockerfile"); + var actionManifest = Path.Combine(actionEntryDirectory, "action.yml"); + if (File.Exists(actionManifest)) + { + executionContext.Debug($"action.yml for action: '{actionManifest}'."); + var manifestManager = HostContext.GetService(); + var actionDefinitionData = manifestManager.Load(executionContext, actionManifest); + + if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Container) + { + var containerAction = actionDefinitionData.Execution as ContainerActionExecutionData; + if (containerAction.Image.EndsWith("Dockerfile") || containerAction.Image.EndsWith("dockerfile")) + { + var dockerFileFullPath = Path.Combine(actionEntryDirectory, containerAction.Image); + executionContext.Debug($"Dockerfile for action: '{dockerFileFullPath}'."); + + setupInfo.Dockerfile = dockerFileFullPath; + setupInfo.WorkingDirectory = destDirectory; + return setupInfo; + } + else if (containerAction.Image.StartsWith("docker://", StringComparison.OrdinalIgnoreCase)) + { + var actionImage = containerAction.Image.Substring("docker://".Length); + + executionContext.Debug($"Container image for action: '{actionImage}'."); + + setupInfo.Image = actionImage; + return setupInfo; + } + else + { + throw new NotSupportedException($"'{containerAction.Image}' should be either '[path]/Dockerfile' or 'docker://image[:tag]'."); + } + } + else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.NodeJS) + { + Trace.Info($"Action node.js file: {(actionDefinitionData.Execution as NodeJSActionExecutionData).Script}, no more preparation."); + return null; + } + else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Plugin) + { + Trace.Info($"Action plugin: {(actionDefinitionData.Execution as PluginActionExecutionData).Plugin}, no more preparation."); + return null; + } + else + { + throw new NotSupportedException(actionDefinitionData.Execution.ExecutionType.ToString()); + } + } + else if (File.Exists(dockerFile)) + { + 
executionContext.Debug($"Dockerfile for action: '{dockerFile}'."); + setupInfo.Dockerfile = dockerFile; + setupInfo.WorkingDirectory = destDirectory; + return setupInfo; + } + else if (File.Exists(dockerFileLowerCase)) + { + executionContext.Debug($"Dockerfile for action: '{dockerFileLowerCase}'."); + setupInfo.Dockerfile = dockerFileLowerCase; + setupInfo.WorkingDirectory = destDirectory; + return setupInfo; + } + else + { + var fullPath = IOUtil.ResolvePath(actionEntryDirectory, "."); // resolve full path without access filesystem. + throw new InvalidOperationException($"Can't find 'action.yml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?"); + } + } + } + + public sealed class Definition + { + public ActionDefinitionData Data { get; set; } + public string Directory { get; set; } + } + + public sealed class ActionDefinitionData + { + public string Name { get; set; } + + public string Description { get; set; } + + public MappingToken Inputs { get; set; } + + public ActionExecutionData Execution { get; set; } + + public Dictionary Deprecated { get; set; } + } + + public enum ActionExecutionType + { + Container, + NodeJS, + Plugin, + Script, + } + + public sealed class ContainerActionExecutionData : ActionExecutionData + { + public override ActionExecutionType ExecutionType => ActionExecutionType.Container; + + public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup); + + public string Image { get; set; } + + public string EntryPoint { get; set; } + + public SequenceToken Arguments { get; set; } + + public MappingToken Environment { get; set; } + + public string Cleanup { get; set; } + } + + public sealed class NodeJSActionExecutionData : ActionExecutionData + { + public override ActionExecutionType ExecutionType => ActionExecutionType.NodeJS; + + public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup); + + public string Script { get; set; } + + public string Cleanup { get; set; } + } + + public sealed class PluginActionExecutionData : ActionExecutionData + { + public override ActionExecutionType ExecutionType => ActionExecutionType.Plugin; + + public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup); + + public string Plugin { get; set; } + + public string Cleanup { get; set; } + } + + public sealed class ScriptActionExecutionData : ActionExecutionData + { + public override ActionExecutionType ExecutionType => ActionExecutionType.Script; + + public override bool HasCleanup => false; + } + + public abstract class ActionExecutionData + { + private string _cleanupCondition = $"{Constants.Expressions.Always}()"; + + public abstract ActionExecutionType ExecutionType { get; } + + public abstract bool HasCleanup { get; } + + public string CleanupCondition + { + get { return _cleanupCondition; } + set { _cleanupCondition = value; } + } + } + + public class ContainerSetupInfo + { + public ContainerSetupInfo(List ids, string image) + { + StepIds = ids; + Container = new ActionContainer() + { + Image = image + }; + } + + public ContainerSetupInfo(List ids, string dockerfile, string workingDirectory) + { + StepIds = ids; + Container = new ActionContainer() + { + Dockerfile = dockerfile, + WorkingDirectory = workingDirectory + }; + } + + public List StepIds { get; set; } + + public ActionContainer Container { get; set; } + } + + public class ActionContainer + { + public string Image { get; set; } + public string Dockerfile { get; set; } + public string WorkingDirectory { get; set; } + public string 
ActionRepository { get; set; } + } +} + diff --git a/src/Runner.Worker/ActionManifestManager.cs b/src/Runner.Worker/ActionManifestManager.cs new file mode 100644 index 00000000000..210e32f866a --- /dev/null +++ b/src/Runner.Worker/ActionManifestManager.cs @@ -0,0 +1,980 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using System.Reflection; +using GitHub.DistributedTask.Pipelines.ObjectTemplating; +using GitHub.DistributedTask.ObjectTemplating.Schema; +using GitHub.DistributedTask.ObjectTemplating; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines.ContextData; +using YamlDotNet.Core; +using YamlDotNet.Core.Events; +using System.Globalization; +using System.Linq; + +namespace GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(ActionManifestManager))] + public interface IActionManifestManager : IRunnerService + { + ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile); + + List EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary contextData); + + Dictionary EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary contextData); + + string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token, IDictionary contextData); + } + + public sealed class ActionManifestManager : RunnerService, IActionManifestManager + { + private TemplateSchema _actionManifestSchema; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + + var assembly = Assembly.GetExecutingAssembly(); + var json = default(string); + using (var stream = assembly.GetManifestResourceStream("GitHub.Runner.Worker.action_yaml.json")) + using (var streamReader = new StreamReader(stream)) + { + json = streamReader.ReadToEnd(); + } + + var objectReader = new JsonObjectReader(null, json); + _actionManifestSchema = TemplateSchema.Load(objectReader); + ArgUtil.NotNull(_actionManifestSchema, nameof(_actionManifestSchema)); + Trace.Info($"Load schema file with definitions: {StringUtil.ConvertToJson(_actionManifestSchema.Definitions.Keys)}"); + } + + public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile) + { + var context = CreateContext(executionContext, null); + ActionDefinitionData actionDefinition = new ActionDefinitionData(); + try + { + var token = default(TemplateToken); + + // Get the file ID + var fileId = context.GetFileId(manifestFile); + var fileContent = File.ReadAllText(manifestFile); + using (var stringReader = new StringReader(fileContent)) + { + var yamlObjectReader = new YamlObjectReader(null, stringReader); + token = TemplateReader.Read(context, "action-root", yamlObjectReader, fileId, out _); + } + + var actionMapping = token.AssertMapping("action manifest root"); + foreach (var actionPair in actionMapping) + { + var propertyName = actionPair.Key.AssertString($"action.yml property key"); + + switch (propertyName.Value) + { + case "name": + actionDefinition.Name = actionPair.Value.AssertString("name").Value; + break; + + case "description": + actionDefinition.Description = actionPair.Value.AssertString("description").Value; + break; + + case "inputs": + ConvertInputs(context, actionPair.Value, actionDefinition); + break; + + case "runs": + actionDefinition.Execution = ConvertRuns(context, actionPair.Value); + break; + default: + 
Trace.Info($"Ignore action property {propertyName}."); + break; + } + } + } + catch (Exception ex) + { + Trace.Error(ex); + context.Errors.Add(ex); + } + + if (context.Errors.Count > 0) + { + foreach (var error in context.Errors) + { + Trace.Error($"Action.yml load error: {error.Message}"); + executionContext.Error(error.Message); + } + + throw new ArgumentException($"Fail to load {manifestFile}"); + } + + if (actionDefinition.Execution == null) + { + executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}"); + throw new ArgumentException($"Top level 'run:' section is required for {manifestFile}"); + } + else + { + Trace.Info($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}"); + } + + return actionDefinition; + } + + public List EvaluateContainerArguments( + IExecutionContext executionContext, + SequenceToken token, + IDictionary contextData) + { + var result = new List(); + + if (token != null) + { + var context = CreateContext(executionContext, contextData); + try + { + var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-args", token, 0, null, omitHeader: true); + context.Errors.Check(); + + Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}"); + + // Sequence + var args = evaluateResult.AssertSequence("container args"); + + foreach (var arg in args) + { + var str = arg.AssertString("container arg").Value; + result.Add(str); + Trace.Info($"Add argument {str}"); + } + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + Trace.Error(ex); + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result; + } + + public Dictionary EvaluateContainerEnvironment( + IExecutionContext executionContext, + MappingToken token, + IDictionary contextData) + { + var result = new Dictionary(StringComparer.OrdinalIgnoreCase); + + if (token != null) + { + var context = CreateContext(executionContext, contextData); + try + { + var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-env", token, 0, null, omitHeader: true); + context.Errors.Check(); + + Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}"); + + // Mapping + var mapping = evaluateResult.AssertMapping("container env"); + + foreach (var pair in mapping) + { + // Literal key + var key = pair.Key.AssertString("container env key"); + + // Literal value + var value = pair.Value.AssertString("container env value"); + result[key.Value] = value.Value; + + Trace.Info($"Add env {key} = {value}"); + } + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + Trace.Error(ex); + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result; + } + + public string EvaluateDefaultInput( + IExecutionContext executionContext, + string inputName, + TemplateToken token, + IDictionary contextData) + { + string result = ""; + if (token != null) + { + var context = CreateContext(executionContext, contextData); + try + { + var evaluateResult = TemplateEvaluator.Evaluate(context, "input-default-context", token, 0, null, omitHeader: true); + context.Errors.Check(); + + Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}"); + + // String + result = evaluateResult.AssertString($"default value for input '{inputName}'").Value; + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + Trace.Error(ex); + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + 
return result; + } + + private TemplateContext CreateContext( + IExecutionContext executionContext, + IDictionary contextData) + { + var result = new TemplateContext + { + CancellationToken = CancellationToken.None, + Errors = new TemplateValidationErrors(10, 500), + Memory = new TemplateMemory( + maxDepth: 100, + maxEvents: 1000000, + maxBytes: 10 * 1024 * 1024), + Schema = _actionManifestSchema, + TraceWriter = executionContext.ToTemplateTraceWriter(), + }; + + if (contextData?.Count > 0) + { + foreach (var pair in contextData) + { + result.ExpressionValues[pair.Key] = pair.Value; + } + } + + return result; + } + + private ActionExecutionData ConvertRuns( + TemplateContext context, + TemplateToken inputsToken) + { + var runsMapping = inputsToken.AssertMapping("runs"); + var usingToken = default(StringToken); + var imageToken = default(StringToken); + var argsToken = default(SequenceToken); + var entrypointToken = default(StringToken); + var envToken = default(MappingToken); + var mainToken = default(StringToken); + var pluginToken = default(StringToken); + var postToken = default(StringToken); + var postEntrypointToken = default(StringToken); + var postIfToken = default(StringToken); + foreach (var run in runsMapping) + { + var runsKey = run.Key.AssertString("runs key").Value; + switch (runsKey) + { + case "using": + usingToken = run.Value.AssertString("using"); + break; + case "image": + imageToken = run.Value.AssertString("image"); + break; + case "args": + argsToken = run.Value.AssertSequence("args"); + break; + case "entrypoint": + entrypointToken = run.Value.AssertString("entrypoint"); + break; + case "env": + envToken = run.Value.AssertMapping("env"); + break; + case "main": + mainToken = run.Value.AssertString("main"); + break; + case "plugin": + pluginToken = run.Value.AssertString("plugin"); + break; + case "post": + postToken = run.Value.AssertString("post"); + break; + case "post-entrypoint": + postEntrypointToken = run.Value.AssertString("post-entrypoint"); + break; + case "post-if": + postIfToken = run.Value.AssertString("post-if"); + break; + default: + Trace.Info($"Ignore run property {runsKey}."); + break; + } + } + + if (usingToken != null) + { + if (string.Equals(usingToken.Value, "docker", StringComparison.OrdinalIgnoreCase)) + { + if (string.IsNullOrEmpty(imageToken?.Value)) + { + throw new ArgumentNullException($"Image is not provided."); + } + else + { + return new ContainerActionExecutionData() + { + Image = imageToken.Value, + Arguments = argsToken, + EntryPoint = entrypointToken?.Value, + Environment = envToken, + Cleanup = postEntrypointToken?.Value, + CleanupCondition = postIfToken?.Value + }; + } + } + else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase)) + { + if (string.IsNullOrEmpty(mainToken?.Value)) + { + throw new ArgumentNullException($"Entry javascript fils is not provided."); + } + else + { + return new NodeJSActionExecutionData() + { + Script = mainToken.Value, + Cleanup = postToken?.Value, + CleanupCondition = postIfToken?.Value + }; + } + } + else + { + throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker' or 'node12' instead."); + } + } + else if (pluginToken != null) + { + return new PluginActionExecutionData() + { + Plugin = pluginToken.Value + }; + } + + throw new NotSupportedException(nameof(ConvertRuns)); + } + + private void ConvertInputs( + TemplateContext context, + TemplateToken inputsToken, + ActionDefinitionData actionDefinition) + { + actionDefinition.Inputs 
= new MappingToken(null, null, null); + var inputsMapping = inputsToken.AssertMapping("inputs"); + foreach (var input in inputsMapping) + { + bool hasDefault = false; + var inputName = input.Key.AssertString("input name"); + var inputMetadata = input.Value.AssertMapping("input metadata"); + foreach (var metadata in inputMetadata) + { + var metadataName = metadata.Key.AssertString("input metadata").Value; + if (string.Equals(metadataName, "default", StringComparison.OrdinalIgnoreCase)) + { + hasDefault = true; + actionDefinition.Inputs.Add(inputName, metadata.Value); + } + else if (string.Equals(metadataName, "deprecationMessage", StringComparison.OrdinalIgnoreCase)) + { + if (actionDefinition.Deprecated == null) + { + actionDefinition.Deprecated = new Dictionary(); + } + var message = metadata.Value.AssertString("input deprecationMessage"); + actionDefinition.Deprecated.Add(inputName.Value, message.Value); + } + } + + if (!hasDefault) + { + actionDefinition.Inputs.Add(inputName, new StringToken(null, null, null, string.Empty)); + } + } + } + } + + /// + /// Converts a YAML file into a TemplateToken + /// + internal sealed class YamlObjectReader : IObjectReader + { + internal YamlObjectReader( + Int32? fileId, + TextReader input) + { + m_fileId = fileId; + m_parser = new Parser(input); + } + + public Boolean AllowLiteral(out LiteralToken value) + { + if (EvaluateCurrent() is Scalar scalar) + { + // Tag specified + if (!string.IsNullOrEmpty(scalar.Tag)) + { + // String tag + if (string.Equals(scalar.Tag, c_stringTag, StringComparison.Ordinal)) + { + value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value); + MoveNext(); + return true; + } + + // Not plain style + if (scalar.Style != ScalarStyle.Plain) + { + throw new NotSupportedException($"The scalar style '{scalar.Style}' on line {scalar.Start.Line} and column {scalar.Start.Column} is not valid with the tag '{scalar.Tag}'"); + } + + // Boolean, Float, Integer, or Null + switch (scalar.Tag) + { + case c_booleanTag: + value = ParseBoolean(scalar); + break; + case c_floatTag: + value = ParseFloat(scalar); + break; + case c_integerTag: + value = ParseInteger(scalar); + break; + case c_nullTag: + value = ParseNull(scalar); + break; + default: + throw new NotSupportedException($"Unexpected tag '{scalar.Tag}'"); + } + + MoveNext(); + return true; + } + + // Plain style, determine type using YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923 + if (scalar.Style == ScalarStyle.Plain) + { + if (MatchNull(scalar, out var nullToken)) + { + value = nullToken; + } + else if (MatchBoolean(scalar, out var booleanToken)) + { + value = booleanToken; + } + else if (MatchInteger(scalar, out var numberToken) || + MatchFloat(scalar, out numberToken)) + { + value = numberToken; + } + else + { + value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value); + } + + MoveNext(); + return true; + } + + // Otherwise assume string + value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value); + MoveNext(); + return true; + } + + value = default; + return false; + } + + public Boolean AllowSequenceStart(out SequenceToken value) + { + if (EvaluateCurrent() is SequenceStart sequenceStart) + { + value = new SequenceToken(m_fileId, sequenceStart.Start.Line, sequenceStart.Start.Column); + MoveNext(); + return true; + } + + value = default; + return false; + } + + public Boolean AllowSequenceEnd() + { + if (EvaluateCurrent() is SequenceEnd) + { + MoveNext(); + return true; + 
} + + return false; + } + + public Boolean AllowMappingStart(out MappingToken value) + { + if (EvaluateCurrent() is MappingStart mappingStart) + { + value = new MappingToken(m_fileId, mappingStart.Start.Line, mappingStart.Start.Column); + MoveNext(); + return true; + } + + value = default; + return false; + } + + public Boolean AllowMappingEnd() + { + if (EvaluateCurrent() is MappingEnd) + { + MoveNext(); + return true; + } + + return false; + } + + /// + /// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd. + /// + public void ValidateEnd() + { + if (EvaluateCurrent() is DocumentEnd) + { + MoveNext(); + } + else + { + throw new InvalidOperationException("Expected document end parse event"); + } + + if (EvaluateCurrent() is StreamEnd) + { + MoveNext(); + } + else + { + throw new InvalidOperationException("Expected stream end parse event"); + } + + if (MoveNext()) + { + throw new InvalidOperationException("Expected end of parse events"); + } + } + + /// + /// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart. + /// + public void ValidateStart() + { + if (EvaluateCurrent() != null) + { + throw new InvalidOperationException("Unexpected parser state"); + } + + if (!MoveNext()) + { + throw new InvalidOperationException("Expected a parse event"); + } + + if (EvaluateCurrent() is StreamStart) + { + MoveNext(); + } + else + { + throw new InvalidOperationException("Expected stream start parse event"); + } + + if (EvaluateCurrent() is DocumentStart) + { + MoveNext(); + } + else + { + throw new InvalidOperationException("Expected document start parse event"); + } + } + + private ParsingEvent EvaluateCurrent() + { + if (m_current == null) + { + m_current = m_parser.Current; + if (m_current != null) + { + if (m_current is Scalar scalar) + { + // Verify not using achors + if (scalar.Anchor != null) + { + throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{scalar.Anchor}'"); + } + } + else if (m_current is MappingStart mappingStart) + { + // Verify not using achors + if (mappingStart.Anchor != null) + { + throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{mappingStart.Anchor}'"); + } + } + else if (m_current is SequenceStart sequenceStart) + { + // Verify not using achors + if (sequenceStart.Anchor != null) + { + throw new InvalidOperationException($"Anchors are not currently supported. 
Remove the anchor '{sequenceStart.Anchor}'"); + } + } + else if (!(m_current is MappingEnd) && + !(m_current is SequenceEnd) && + !(m_current is DocumentStart) && + !(m_current is DocumentEnd) && + !(m_current is StreamStart) && + !(m_current is StreamEnd)) + { + throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}"); + } + } + } + + return m_current; + } + + private Boolean MoveNext() + { + m_current = null; + return m_parser.MoveNext(); + } + + private BooleanToken ParseBoolean(Scalar scalar) + { + if (MatchBoolean(scalar, out var token)) + { + return token; + } + + ThrowInvalidValue(scalar, c_booleanTag); // throws + return default; + } + + private NumberToken ParseFloat(Scalar scalar) + { + if (MatchFloat(scalar, out var token)) + { + return token; + } + + ThrowInvalidValue(scalar, c_floatTag); // throws + return default; + } + + private NumberToken ParseInteger(Scalar scalar) + { + if (MatchInteger(scalar, out var token)) + { + return token; + } + + ThrowInvalidValue(scalar, c_integerTag); // throws + return default; + } + + private NullToken ParseNull(Scalar scalar) + { + if (MatchNull(scalar, out var token)) + { + return token; + } + + ThrowInvalidValue(scalar, c_nullTag); // throws + return default; + } + + private Boolean MatchBoolean( + Scalar scalar, + out BooleanToken value) + { + // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923 + switch (scalar.Value ?? string.Empty) + { + case "true": + case "True": + case "TRUE": + value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, true); + return true; + case "false": + case "False": + case "FALSE": + value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, false); + return true; + } + + value = default; + return false; + } + + private Boolean MatchFloat( + Scalar scalar, + out NumberToken value) + { + // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923 + var str = scalar.Value; + if (!string.IsNullOrEmpty(str)) + { + // Check for [-+]?(\.inf|\.Inf|\.INF)|\.nan|\.NaN|\.NAN + switch (str) + { + case ".inf": + case ".Inf": + case ".INF": + case "+.inf": + case "+.Inf": + case "+.INF": + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.PositiveInfinity); + return true; + case "-.inf": + case "-.Inf": + case "-.INF": + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NegativeInfinity); + return true; + case ".nan": + case ".NaN": + case ".NAN": + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NaN); + return true; + } + + // Otherwise check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)? + + // Skip leading sign + var index = str[0] == '-' || str[0] == '+' ? 1 : 0; + + // Check for integer portion + var length = str.Length; + var hasInteger = false; + while (index < length && str[index] >= '0' && str[index] <= '9') + { + hasInteger = true; + index++; + } + + // Check for decimal point + var hasDot = false; + if (index < length && str[index] == '.') + { + hasDot = true; + index++; + } + + // Check for decimal portion + var hasDecimal = false; + while (index < length && str[index] >= '0' && str[index] <= '9') + { + hasDecimal = true; + index++; + } + + // Check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?) 
+ if ((hasDot && hasDecimal) || hasInteger) + { + // Check for end + if (index == length) + { + // Try parse + if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var doubleValue)) + { + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue); + return true; + } + // Otherwise exceeds range + else + { + ThrowInvalidValue(scalar, c_floatTag); // throws + } + } + // Check [eE][-+]?[0-9] + else if (index < length && (str[index] == 'e' || str[index] == 'E')) + { + index++; + + // Skip sign + if (index < length && (str[index] == '-' || str[index] == '+')) + { + index++; + } + + // Check for exponent + var hasExponent = false; + while (index < length && str[index] >= '0' && str[index] <= '9') + { + hasExponent = true; + index++; + } + + // Check for end + if (hasExponent && index == length) + { + // Try parse + if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var doubleValue)) + { + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, (Double)doubleValue); + return true; + } + // Otherwise exceeds range + else + { + ThrowInvalidValue(scalar, c_floatTag); // throws + } + } + } + } + } + + value = default; + return false; + } + + private Boolean MatchInteger( + Scalar scalar, + out NumberToken value) + { + // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923 + var str = scalar.Value; + if (!string.IsNullOrEmpty(str)) + { + // Check for [0-9]+ + var firstChar = str[0]; + if (firstChar >= '0' && firstChar <= '9' && + str.Skip(1).All(x => x >= '0' && x <= '9')) + { + // Try parse + if (Double.TryParse(str, NumberStyles.None, CultureInfo.InvariantCulture, out var doubleValue)) + { + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue); + return true; + } + + // Otherwise exceeds range + ThrowInvalidValue(scalar, c_integerTag); // throws + } + // Check for (-|+)[0-9]+ + else if ((firstChar == '-' || firstChar == '+') && + str.Length > 1 && + str.Skip(1).All(x => x >= '0' && x <= '9')) + { + // Try parse + if (Double.TryParse(str, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out var doubleValue)) + { + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue); + return true; + } + + // Otherwise exceeds range + ThrowInvalidValue(scalar, c_integerTag); // throws + } + // Check for 0x[0-9a-fA-F]+ + else if (firstChar == '0' && + str.Length > 2 && + str[1] == 'x' && + str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F'))) + { + // Try parse + if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integerValue)) + { + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue); + return true; + } + + // Otherwise exceeds range + ThrowInvalidValue(scalar, c_integerTag); // throws + } + // Check for 0o[0-9]+ + else if (firstChar == '0' && + str.Length > 2 && + str[1] == 'o' && + str.Skip(2).All(x => x >= '0' && x <= '7')) + { + // Try parse + var integerValue = default(Int32); + try + { + integerValue = Convert.ToInt32(str.Substring(2), 8); + } + // Otherwise exceeds range + catch (Exception) + { + ThrowInvalidValue(scalar, c_integerTag); // throws + } + + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue); + return true; + } + } + + 
value = default; + return false; + } + + private Boolean MatchNull( + Scalar scalar, + out NullToken value) + { + // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923 + switch (scalar.Value ?? string.Empty) + { + case "": + case "null": + case "Null": + case "NULL": + case "~": + value = new NullToken(m_fileId, scalar.Start.Line, scalar.Start.Column); + return true; + } + + value = default; + return false; + } + + private void ThrowInvalidValue( + Scalar scalar, + String tag) + { + throw new NotSupportedException($"The value '{scalar.Value}' on line {scalar.Start.Line} and column {scalar.Start.Column} is invalid for the type '{scalar.Tag}'"); + } + + private const String c_booleanTag = "tag:yaml.org,2002:bool"; + private const String c_floatTag = "tag:yaml.org,2002:float"; + private const String c_integerTag = "tag:yaml.org,2002:int"; + private const String c_nullTag = "tag:yaml.org,2002:null"; + private const String c_stringTag = "tag:yaml.org,2002:string"; + private readonly Int32? m_fileId; + private readonly Parser m_parser; + private ParsingEvent m_current; + } +} + diff --git a/src/Runner.Worker/ActionRunner.cs b/src/Runner.Worker/ActionRunner.cs new file mode 100644 index 00000000000..0ec510c66c8 --- /dev/null +++ b/src/Runner.Worker/ActionRunner.cs @@ -0,0 +1,324 @@ +using System; +using System.IO; +using System.Text; +using System.Threading.Tasks; +using GitHub.DistributedTask.ObjectTemplating; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.Pipelines.ObjectTemplating; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Worker.Handlers; +using Pipelines = GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using System.Collections.Generic; + +namespace GitHub.Runner.Worker +{ + public enum ActionRunStage + { + Main, + Post, + } + + [ServiceLocator(Default = typeof(ActionRunner))] + public interface IActionRunner : IStep, IRunnerService + { + ActionRunStage Stage { get; set; } + Boolean TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context); + Pipelines.ActionStep Action { get; set; } + } + + public sealed class ActionRunner : RunnerService, IActionRunner + { + private bool _didFullyEvaluateDisplayName = false; + + private string _displayName; + + public ActionRunStage Stage { get; set; } + + public string Condition { get; set; } + + public TemplateToken ContinueOnError => Action?.ContinueOnError; + + public string DisplayName + { + get + { + // TODO: remove the Action.DisplayName check post m158 deploy, it is done for back compat for older servers + if (!string.IsNullOrEmpty(Action?.DisplayName)) + { + return Action?.DisplayName; + } + return string.IsNullOrEmpty(_displayName) ? "run" : _displayName; + } + set + { + _displayName = value; + } + } + + public IExecutionContext ExecutionContext { get; set; } + + public Pipelines.ActionStep Action { get; set; } + + public TemplateToken Timeout => Action?.TimeoutInMinutes; + + public async Task RunAsync() + { + // Validate args. + Trace.Entering(); + ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext)); + ArgUtil.NotNull(Action, nameof(Action)); + var taskManager = HostContext.GetService(); + var handlerFactory = HostContext.GetService(); + + // Load the task definition and choose the handler. 
+ Definition definition = taskManager.LoadAction(ExecutionContext, Action); + ArgUtil.NotNull(definition, nameof(definition)); + + ActionExecutionData handlerData = definition.Data?.Execution; + ArgUtil.NotNull(handlerData, nameof(handlerData)); + + // The action has post cleanup defined. + // we need to create timeline record for them and add them to the step list that StepRunner is using + if (handlerData.HasCleanup && Stage == ActionRunStage.Main) + { + string postDisplayName = null; + if (this.DisplayName.StartsWith(PipelineTemplateConstants.RunDisplayPrefix)) + { + postDisplayName = $"Post {this.DisplayName.Substring(PipelineTemplateConstants.RunDisplayPrefix.Length)}"; + } + else + { + postDisplayName = $"Post {this.DisplayName}"; + } + ExecutionContext.RegisterPostJobAction(postDisplayName, handlerData.CleanupCondition, Action); + } + + IStepHost stepHost = HostContext.CreateService(); + + // Makes directory for event_path data + var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp); + var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow"); + Directory.CreateDirectory(workflowDirectory); + + var gitHubEvent = ExecutionContext.GetGitHubContext("event"); + + // adds the GitHub event path/file if the event exists + if (gitHubEvent != null) + { + var workflowFile = Path.Combine(workflowDirectory, "event.json"); + Trace.Info($"Write event payload to {workflowFile}"); + File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false)); + ExecutionContext.SetGitHubContext("event_path", workflowFile); + } + + // Setup container stephost for running inside the container. + if (ExecutionContext.Container != null) + { + // Make sure required container is already created. + ArgUtil.NotNullOrEmpty(ExecutionContext.Container.ContainerId, nameof(ExecutionContext.Container.ContainerId)); + var containerStepHost = HostContext.CreateService(); + containerStepHost.Container = ExecutionContext.Container; + stepHost = containerStepHost; + } + + // Load the inputs. + ExecutionContext.Debug("Loading inputs"); + var templateTrace = ExecutionContext.ToTemplateTraceWriter(); + var schema = new PipelineTemplateSchemaFactory().CreateSchema(); + var templateEvaluator = new PipelineTemplateEvaluator(templateTrace, schema); + var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues); + + foreach (KeyValuePair input in inputs) + { + string message = ""; + if (definition.Data?.Deprecated?.TryGetValue(input.Key, out message) == true) + { + ExecutionContext.Warning(String.Format("Input '{0}' has been deprecated with message: {1}", input.Key, message)); + } + } + + // Merge the default inputs from the definition + if (definition.Data?.Inputs != null) + { + var manifestManager = HostContext.GetService(); + foreach (var input in (definition.Data?.Inputs)) + { + string key = input.Key.AssertString("action input name").Value; + if (!inputs.ContainsKey(key)) + { + var evaluateContext = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var data in ExecutionContext.ExpressionValues) + { + evaluateContext[data.Key] = data.Value; + } + + inputs[key] = manifestManager.EvaluateDefaultInput(ExecutionContext, key, input.Value, evaluateContext); + } + } + } + + // Load the action environment. 
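+            // The merge below is ordered so that later writes win (hypothetical values for illustration):
+            //     job-level env (##[set-env]): CI=true, FOO=from-job
+            //     step-level env: block:       FOO=from-step
+            //     saved intra-action state:    isPost=true
+            // produces CI=true, FOO=from-step and STATE_isPost=true in the handler's environment,
+            // i.e. a step's env block overrides a job-level value of the same name, and intra-action
+            // state is surfaced back to the action through STATE_-prefixed variables.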
+ ExecutionContext.Debug("Loading env"); + var environment = new Dictionary(VarUtil.EnvironmentVariableKeyComparer); + + // Apply environment set using ##[set-env] first since these are job level env + foreach (var env in ExecutionContext.EnvironmentVariables) + { + environment[env.Key] = env.Value ?? string.Empty; + } + + // Apply action's env block later. + var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(Action.Environment, ExecutionContext.ExpressionValues, VarUtil.EnvironmentVariableKeyComparer); + foreach (var env in actionEnvironment) + { + environment[env.Key] = env.Value ?? string.Empty; + } + + // Apply action's intra-action state at last + foreach (var state in ExecutionContext.IntraActionState) + { + environment[$"STATE_{state.Key}"] = state.Value ?? string.Empty; + } + + // Create the handler. + IHandler handler = handlerFactory.Create( + ExecutionContext, + Action.Reference, + stepHost, + handlerData, + inputs, + environment, + ExecutionContext.Variables, + actionDirectory: definition.Directory); + + // Print out action details + handler.PrintActionDetails(Stage); + + // Run the task. + await handler.RunAsync(Stage); + } + + public bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context) + { + ArgUtil.NotNull(context, nameof(context)); + ArgUtil.NotNull(Action, nameof(Action)); + + // If we have already expanded the display name, there is no need to expand it again + // TODO: Remove the ShouldEvaluateDisplayName check and field post m158 deploy, we should do it by default once the server is updated + if (_didFullyEvaluateDisplayName || !string.IsNullOrEmpty(Action.DisplayName)) + { + return false; + } + + bool didFullyEvaluate; + _displayName = GenerateDisplayName(Action, contextData, context, out didFullyEvaluate); + + // If we evaluated fully mask any secrets + if (didFullyEvaluate) + { + _displayName = HostContext.SecretMasker.MaskSecrets(_displayName); + } + context.Debug($"Set step '{Action.Name}' display name to: '{_displayName}'"); + _didFullyEvaluateDisplayName = didFullyEvaluate; + return didFullyEvaluate; + } + + private string GenerateDisplayName(ActionStep action, DictionaryContextData contextData, IExecutionContext context, out bool didFullyEvaluate) + { + ArgUtil.NotNull(context, nameof(context)); + ArgUtil.NotNull(action, nameof(action)); + + var displayName = string.Empty; + var prefix = string.Empty; + var tokenToParse = default(ScalarToken); + didFullyEvaluate = false; + // Get the token we need to parse + // It could be passed in as the Display Name, or we have to pull it from various parts of the Action. + if (action.DisplayNameToken != null) + { + tokenToParse = action.DisplayNameToken as ScalarToken; + } + else if (action.Reference?.Type == ActionSourceType.Repository) + { + prefix = PipelineTemplateConstants.RunDisplayPrefix; + var repositoryReference = action.Reference as RepositoryPathReference; + var pathString = string.IsNullOrEmpty(repositoryReference.Path) ? string.Empty : $"/{repositoryReference.Path}"; + var repoString = string.IsNullOrEmpty(repositoryReference.Ref) ? 
$"{repositoryReference.Name}{pathString}" : + $"{repositoryReference.Name}{pathString}@{repositoryReference.Ref}"; + tokenToParse = new StringToken(null, null, null, repoString); + } + else if (action.Reference?.Type == ActionSourceType.ContainerRegistry) + { + prefix = PipelineTemplateConstants.RunDisplayPrefix; + var containerReference = action.Reference as ContainerRegistryReference; + tokenToParse = new StringToken(null, null, null, containerReference.Image); + } + else if (action.Reference?.Type == ActionSourceType.Script) + { + prefix = PipelineTemplateConstants.RunDisplayPrefix; + var inputs = action.Inputs.AssertMapping(null); + foreach (var pair in inputs) + { + var propertyName = pair.Key.AssertString($"{PipelineTemplateConstants.Steps}"); + if (string.Equals(propertyName.Value, "script", StringComparison.OrdinalIgnoreCase)) + { + tokenToParse = pair.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Run}"); + break; + } + } + } + else + { + context.Error($"Encountered an unknown action reference type when evaluating the display name: {action.Reference?.Type}"); + return displayName; + } + + // If we have nothing to parse, abort + if (tokenToParse == null) + { + return displayName; + } + // Try evaluating fully + var schema = new PipelineTemplateSchemaFactory().CreateSchema(); + var templateEvaluator = new PipelineTemplateEvaluator(context.ToTemplateTraceWriter(), schema); + try + { + didFullyEvaluate = templateEvaluator.TryEvaluateStepDisplayName(tokenToParse, contextData, out displayName); + } + catch (TemplateValidationException e) + { + context.Warning($"Encountered an error when evaluating display name {tokenToParse.ToString()}. {e.Message}"); + return displayName; + } + + // Default to a prettified token if we could not evaluate + if (!didFullyEvaluate) + { + displayName = tokenToParse.ToDisplayString(); + } + + displayName = FormatStepName(prefix, displayName); + return displayName; + } + + private static string FormatStepName(string prefix, string stepName) + { + if (string.IsNullOrEmpty(stepName)) + { + return string.Empty; + } + + var result = stepName.TrimStart(' ', '\t', '\r', '\n'); + var firstNewLine = result.IndexOfAny(new[] { '\r', '\n' }); + if (firstNewLine >= 0) + { + result = result.Substring(0, firstNewLine); + } + return $"{prefix}{result}"; + } + } +} diff --git a/src/Runner.Worker/Container/ContainerInfo.cs b/src/Runner.Worker/Container/ContainerInfo.cs new file mode 100644 index 00000000000..86885dbc424 --- /dev/null +++ b/src/Runner.Worker/Container/ContainerInfo.cs @@ -0,0 +1,318 @@ +using System; +using System.Collections.Generic; +using System.IO; +using GitHub.Runner.Common.Util; +using Pipelines = GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker.Container +{ + public class ContainerInfo + { + private IDictionary _userMountVolumes; + private List _mountVolumes; + private IDictionary _userPortMappings; + private List _portMappings; + private IDictionary _environmentVariables; + private List _pathMappings = new List(); + + public ContainerInfo() + { + + } + + public ContainerInfo(IHostContext hostContext, Pipelines.JobContainer container, bool isJobContainer = true, string networkAlias = null) + { + this.ContainerName = container.Alias; + + string containerImage = container.Image; + ArgUtil.NotNullOrEmpty(containerImage, nameof(containerImage)); + + this.ContainerImage = containerImage; + this.ContainerDisplayName = 
$"{container.Alias}_{Pipelines.Validation.NameValidation.Sanitize(containerImage)}_{Guid.NewGuid().ToString("N").Substring(0, 6)}"; + this.ContainerCreateOptions = container.Options; + _environmentVariables = container.Environment; + this.IsJobContainer = isJobContainer; + this.ContainerNetworkAlias = networkAlias; + +#if OS_WINDOWS + _pathMappings.Add(new PathMapping(hostContext.GetDirectory(WellKnownDirectory.Work), "C:\\__w")); + _pathMappings.Add(new PathMapping(hostContext.GetDirectory(WellKnownDirectory.Tools), "C:\\__t")); // Tool cache folder may come from ENV, so we need a unique folder to avoid collision + _pathMappings.Add(new PathMapping(hostContext.GetDirectory(WellKnownDirectory.Externals), "C:\\__e")); + // add -v '\\.\pipe\docker_engine:\\.\pipe\docker_engine' when they are available (17.09) +#else + _pathMappings.Add(new PathMapping(hostContext.GetDirectory(WellKnownDirectory.Work), "/__w")); + _pathMappings.Add(new PathMapping(hostContext.GetDirectory(WellKnownDirectory.Tools), "/__t")); // Tool cache folder may come from ENV, so we need a unique folder to avoid collision + _pathMappings.Add(new PathMapping(hostContext.GetDirectory(WellKnownDirectory.Externals), "/__e")); + if (this.IsJobContainer) + { + this.MountVolumes.Add(new MountVolume("/var/run/docker.sock", "/var/run/docker.sock")); + } +#endif + if (container.Ports?.Count > 0) + { + foreach (var port in container.Ports) + { + UserPortMappings[port] = port; + } + } + if (container.Volumes?.Count > 0) + { + foreach (var volume in container.Volumes) + { + UserMountVolumes[volume] = volume; + } + } + } + + public string ContainerId { get; set; } + public string ContainerDisplayName { get; set; } + public string ContainerNetwork { get; set; } + public string ContainerNetworkAlias { get; set; } + public string ContainerImage { get; set; } + public string ContainerName { get; set; } + public string ContainerEntryPointArgs { get; set; } + public string ContainerEntryPoint { get; set; } + public string ContainerWorkDirectory { get; set; } + public string ContainerCreateOptions { get; private set; } + public string ContainerRuntimePath { get; set; } + public bool IsJobContainer { get; set; } + + public IDictionary ContainerEnvironmentVariables + { + get + { + if (_environmentVariables == null) + { + _environmentVariables = new Dictionary(); + } + + return _environmentVariables; + } + } + + public IDictionary UserMountVolumes + { + get + { + if (_userMountVolumes == null) + { + _userMountVolumes = new Dictionary(); + } + return _userMountVolumes; + } + } + + public List MountVolumes + { + get + { + if (_mountVolumes == null) + { + _mountVolumes = new List(); + } + + return _mountVolumes; + } + } + + public IDictionary UserPortMappings + { + get + { + if (_userPortMappings == null) + { + _userPortMappings = new Dictionary(); + } + + return _userPortMappings; + } + } + + public List PortMappings + { + get + { + if (_portMappings == null) + { + _portMappings = new List(); + } + + return _portMappings; + } + } + + public string TranslateToContainerPath(string path) + { + if (!string.IsNullOrEmpty(path)) + { + foreach (var mapping in _pathMappings) + { +#if OS_WINDOWS + if (string.Equals(path, mapping.HostPath, StringComparison.OrdinalIgnoreCase)) + { + return mapping.ContainerPath; + } + + if (path.StartsWith(mapping.HostPath + Path.DirectorySeparatorChar, StringComparison.OrdinalIgnoreCase) || + path.StartsWith(mapping.HostPath + Path.AltDirectorySeparatorChar, StringComparison.OrdinalIgnoreCase)) + { + return 
mapping.ContainerPath + path.Remove(0, mapping.HostPath.Length); + } +#else + if (string.Equals(path, mapping.HostPath)) + { + return mapping.ContainerPath; + } + + if (path.StartsWith(mapping.HostPath + Path.DirectorySeparatorChar)) + { + return mapping.ContainerPath + path.Remove(0, mapping.HostPath.Length); + } +#endif + } + } + + return path; + } + + public string TranslateToHostPath(string path) + { + if (!string.IsNullOrEmpty(path)) + { + foreach (var mapping in _pathMappings) + { +#if OS_WINDOWS + if (string.Equals(path, mapping.ContainerPath, StringComparison.OrdinalIgnoreCase)) + { + return mapping.HostPath; + } + + if (path.StartsWith(mapping.ContainerPath + Path.DirectorySeparatorChar, StringComparison.OrdinalIgnoreCase) || + path.StartsWith(mapping.ContainerPath + Path.AltDirectorySeparatorChar, StringComparison.OrdinalIgnoreCase)) + { + return mapping.HostPath + path.Remove(0, mapping.ContainerPath.Length); + } +#else + if (string.Equals(path, mapping.ContainerPath)) + { + return mapping.HostPath; + } + + if (path.StartsWith(mapping.ContainerPath + Path.DirectorySeparatorChar)) + { + return mapping.HostPath + path.Remove(0, mapping.ContainerPath.Length); + } +#endif + } + } + + return path; + } + + public void AddPortMappings(List portMappings) + { + foreach (var port in portMappings) + { + PortMappings.Add(port); + } + } + + public void AddPathTranslateMapping(string hostCommonPath, string containerCommonPath) + { + _pathMappings.Insert(0, new PathMapping(hostCommonPath, containerCommonPath)); + } + } + + public class MountVolume + { + public MountVolume(string sourceVolumePath, string targetVolumePath, bool readOnly = false) + { + this.SourceVolumePath = sourceVolumePath; + this.TargetVolumePath = targetVolumePath; + this.ReadOnly = readOnly; + } + + public MountVolume(string fromString) + { + ParseVolumeString(fromString); + } + + private void ParseVolumeString(string volume) + { + var volumeSplit = volume.Split(":"); + if (volumeSplit.Length == 3) + { + // source:target:ro + SourceVolumePath = volumeSplit[0]; + TargetVolumePath = volumeSplit[1]; + ReadOnly = String.Equals(volumeSplit[2], "ro", StringComparison.OrdinalIgnoreCase); + } + else if (volumeSplit.Length == 2) + { + if (String.Equals(volumeSplit[1], "ro", StringComparison.OrdinalIgnoreCase)) + { + // target:ro + TargetVolumePath = volumeSplit[0]; + ReadOnly = true; + } + else + { + // source:target + SourceVolumePath = volumeSplit[0]; + TargetVolumePath = volumeSplit[1]; + ReadOnly = false; + } + } + else + { + // target - or, default to passing straight through + TargetVolumePath = volume; + ReadOnly = false; + } + } + + public string SourceVolumePath { get; set; } + public string TargetVolumePath { get; set; } + public bool ReadOnly { get; set; } + } + + public class PortMapping + { + public PortMapping(string hostPort, string containerPort, string protocol) + { + this.HostPort = hostPort; + this.ContainerPort = containerPort; + this.Protocol = protocol; + } + + public string HostPort { get; set; } + public string ContainerPort { get; set; } + public string Protocol { get; set; } + } + + public class DockerVersion + { + public DockerVersion(Version serverVersion, Version clientVersion) + { + this.ServerVersion = serverVersion; + this.ClientVersion = clientVersion; + } + + public Version ServerVersion { get; set; } + public Version ClientVersion { get; set; } + } + + public class PathMapping + { + public PathMapping(string hostPath, string containerPath) + { + this.HostPath = hostPath; + this.ContainerPath = 
containerPath; + } + + public string HostPath { get; set; } + public string ContainerPath { get; set; } + } +} diff --git a/src/Runner.Worker/Container/DockerCommandManager.cs b/src/Runner.Worker/Container/DockerCommandManager.cs new file mode 100644 index 00000000000..555cdefcfc0 --- /dev/null +++ b/src/Runner.Worker/Container/DockerCommandManager.cs @@ -0,0 +1,433 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text.RegularExpressions; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker.Container +{ + [ServiceLocator(Default = typeof(DockerCommandManager))] + public interface IDockerCommandManager : IRunnerService + { + string DockerPath { get; } + string DockerInstanceLabel { get; } + Task DockerVersion(IExecutionContext context); + Task DockerPull(IExecutionContext context, string image); + Task DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string tag); + Task DockerCreate(IExecutionContext context, ContainerInfo container); + Task DockerRun(IExecutionContext context, ContainerInfo container, EventHandler stdoutDataReceived, EventHandler stderrDataReceived); + Task DockerStart(IExecutionContext context, string containerId); + Task DockerLogs(IExecutionContext context, string containerId); + Task> DockerPS(IExecutionContext context, string options); + Task DockerRemove(IExecutionContext context, string containerId); + Task DockerNetworkCreate(IExecutionContext context, string network); + Task DockerNetworkRemove(IExecutionContext context, string network); + Task DockerNetworkPrune(IExecutionContext context); + Task DockerExec(IExecutionContext context, string containerId, string options, string command); + Task DockerExec(IExecutionContext context, string containerId, string options, string command, List outputs); + Task> DockerInspect(IExecutionContext context, string dockerObject, string options); + Task> DockerPort(IExecutionContext context, string containerId); + } + + public class DockerCommandManager : RunnerService, IDockerCommandManager + { + public string DockerPath { get; private set; } + + public string DockerInstanceLabel { get; private set; } + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + DockerPath = WhichUtil.Which("docker", true, Trace); + DockerInstanceLabel = IOUtil.GetPathHash(hostContext.GetDirectory(WellKnownDirectory.Root)).Substring(0, 6); + } + + public async Task DockerVersion(IExecutionContext context) + { + string serverVersionStr = (await ExecuteDockerCommandAsync(context, "version", "--format '{{.Server.APIVersion}}'")).FirstOrDefault(); + ArgUtil.NotNullOrEmpty(serverVersionStr, "Docker.Server.Version"); + context.Output($"Docker daemon API version: {serverVersionStr}"); + + string clientVersionStr = (await ExecuteDockerCommandAsync(context, "version", "--format '{{.Client.APIVersion}}'")).FirstOrDefault(); + ArgUtil.NotNullOrEmpty(serverVersionStr, "Docker.Client.Version"); + context.Output($"Docker client API version: {clientVersionStr}"); + + // we interested about major.minor.patch version + Regex verRegex = new Regex("\\d+\\.\\d+(\\.\\d+)?", RegexOptions.IgnoreCase); + + Version serverVersion = null; + var serverVersionMatchResult = verRegex.Match(serverVersionStr); + if (serverVersionMatchResult.Success && !string.IsNullOrEmpty(serverVersionMatchResult.Value)) + { + if 
(!Version.TryParse(serverVersionMatchResult.Value, out serverVersion)) + { + serverVersion = null; + } + } + + Version clientVersion = null; + var clientVersionMatchResult = verRegex.Match(serverVersionStr); + if (clientVersionMatchResult.Success && !string.IsNullOrEmpty(clientVersionMatchResult.Value)) + { + if (!Version.TryParse(clientVersionMatchResult.Value, out clientVersion)) + { + clientVersion = null; + } + } + + return new DockerVersion(serverVersion, clientVersion); + } + + public async Task DockerPull(IExecutionContext context, string image) + { + return await ExecuteDockerCommandAsync(context, "pull", image, context.CancellationToken); + } + + public async Task DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string tag) + { + return await ExecuteDockerCommandAsync(context, "build", $"-t {tag} \"{dockerFile}\"", workingDirectory, context.CancellationToken); + } + + public async Task DockerCreate(IExecutionContext context, ContainerInfo container) + { + IList dockerOptions = new List(); + // OPTIONS + dockerOptions.Add($"--name {container.ContainerDisplayName}"); + dockerOptions.Add($"--label {DockerInstanceLabel}"); + if (!string.IsNullOrEmpty(container.ContainerWorkDirectory)) + { + dockerOptions.Add($"--workdir {container.ContainerWorkDirectory}"); + } + if (!string.IsNullOrEmpty(container.ContainerNetwork)) + { + dockerOptions.Add($"--network {container.ContainerNetwork}"); + } + if (!string.IsNullOrEmpty(container.ContainerNetworkAlias)) + { + dockerOptions.Add($"--network-alias {container.ContainerNetworkAlias}"); + } + foreach (var port in container.UserPortMappings) + { + dockerOptions.Add($"-p {port.Value}"); + } + dockerOptions.Add($"{container.ContainerCreateOptions}"); + foreach (var env in container.ContainerEnvironmentVariables) + { + if (String.IsNullOrEmpty(env.Value)) + { + dockerOptions.Add($"-e \"{env.Key}\""); + } + else + { + dockerOptions.Add($"-e \"{env.Key}={env.Value.Replace("\"", "\\\"")}\""); + } + } + + // Watermark for GitHub Action environment + dockerOptions.Add("-e GITHUB_ACTIONS=true"); + + foreach (var volume in container.MountVolumes) + { + // replace `"` with `\"` and add `"{0}"` to all path. + String volumeArg; + if (String.IsNullOrEmpty(volume.SourceVolumePath)) + { + // Anonymous docker volume + volumeArg = $"-v \"{volume.TargetVolumePath.Replace("\"", "\\\"")}\""; + } + else + { + // Named Docker volume / host bind mount + volumeArg = $"-v \"{volume.SourceVolumePath.Replace("\"", "\\\"")}\":\"{volume.TargetVolumePath.Replace("\"", "\\\"")}\""; + } + if (volume.ReadOnly) + { + volumeArg += ":ro"; + } + dockerOptions.Add(volumeArg); + } + if (!string.IsNullOrEmpty(container.ContainerEntryPoint)) + { + dockerOptions.Add($"--entrypoint \"{container.ContainerEntryPoint}\""); + } + // IMAGE + dockerOptions.Add($"{container.ContainerImage}"); + + // COMMAND + // Intentionally blank. Always overwrite ENTRYPOINT and/or send ARGs + + // [ARG...] 
+ dockerOptions.Add($"{container.ContainerEntryPointArgs}"); + + var optionsString = string.Join(" ", dockerOptions); + List outputStrings = await ExecuteDockerCommandAsync(context, "create", optionsString); + + return outputStrings.FirstOrDefault(); + } + + public async Task DockerRun(IExecutionContext context, ContainerInfo container, EventHandler stdoutDataReceived, EventHandler stderrDataReceived) + { + IList dockerOptions = new List(); + // OPTIONS + dockerOptions.Add($"--name {container.ContainerDisplayName}"); + dockerOptions.Add($"--label {DockerInstanceLabel}"); + + dockerOptions.Add($"--workdir {container.ContainerWorkDirectory}"); + dockerOptions.Add($"--rm"); + + foreach (var env in container.ContainerEnvironmentVariables) + { + // e.g. -e MY_SECRET maps the value into the exec'ed process without exposing + // the value directly in the command + dockerOptions.Add($"-e {env.Key}"); + } + + // Watermark for GitHub Action environment + dockerOptions.Add("-e GITHUB_ACTIONS=true"); + + if (!string.IsNullOrEmpty(container.ContainerEntryPoint)) + { + dockerOptions.Add($"--entrypoint \"{container.ContainerEntryPoint}\""); + } + + if (!string.IsNullOrEmpty(container.ContainerNetwork)) + { + dockerOptions.Add($"--network {container.ContainerNetwork}"); + } + + foreach (var volume in container.MountVolumes) + { + // replace `"` with `\"` and add `"{0}"` to all path. + String volumeArg; + if (String.IsNullOrEmpty(volume.SourceVolumePath)) + { + // Anonymous docker volume + volumeArg = $"-v \"{volume.TargetVolumePath.Replace("\"", "\\\"")}\""; + } + else + { + // Named Docker volume / host bind mount + volumeArg = $"-v \"{volume.SourceVolumePath.Replace("\"", "\\\"")}\":\"{volume.TargetVolumePath.Replace("\"", "\\\"")}\""; + } + if (volume.ReadOnly) + { + volumeArg += ":ro"; + } + dockerOptions.Add(volumeArg); + } + // IMAGE + dockerOptions.Add($"{container.ContainerImage}"); + + // COMMAND + // Intentionally blank. Always overwrite ENTRYPOINT and/or send ARGs + + // [ARG...] 
+ dockerOptions.Add($"{container.ContainerEntryPointArgs}"); + + var optionsString = string.Join(" ", dockerOptions); + return await ExecuteDockerCommandAsync(context, "run", optionsString, container.ContainerEnvironmentVariables, stdoutDataReceived, stderrDataReceived, context.CancellationToken); + } + + public async Task DockerStart(IExecutionContext context, string containerId) + { + return await ExecuteDockerCommandAsync(context, "start", containerId, context.CancellationToken); + } + + public async Task DockerRemove(IExecutionContext context, string containerId) + { + return await ExecuteDockerCommandAsync(context, "rm", $"--force {containerId}", context.CancellationToken); + } + + public async Task DockerLogs(IExecutionContext context, string containerId) + { + return await ExecuteDockerCommandAsync(context, "logs", $"--details {containerId}", context.CancellationToken); + } + + public async Task> DockerPS(IExecutionContext context, string options) + { + return await ExecuteDockerCommandAsync(context, "ps", options); + } + + public async Task DockerNetworkCreate(IExecutionContext context, string network) + { +#if OS_WINDOWS + return await ExecuteDockerCommandAsync(context, "network", $"create --label {DockerInstanceLabel} {network} --driver nat", context.CancellationToken); +#else + return await ExecuteDockerCommandAsync(context, "network", $"create --label {DockerInstanceLabel} {network}", context.CancellationToken); +#endif + } + + public async Task DockerNetworkRemove(IExecutionContext context, string network) + { + return await ExecuteDockerCommandAsync(context, "network", $"rm {network}", context.CancellationToken); + } + + public async Task DockerNetworkPrune(IExecutionContext context) + { + return await ExecuteDockerCommandAsync(context, "network", $"prune --force --filter \"label={DockerInstanceLabel}\"", context.CancellationToken); + } + + public async Task DockerExec(IExecutionContext context, string containerId, string options, string command) + { + return await ExecuteDockerCommandAsync(context, "exec", $"{options} {containerId} {command}", context.CancellationToken); + } + + public async Task DockerExec(IExecutionContext context, string containerId, string options, string command, List output) + { + ArgUtil.NotNull(output, nameof(output)); + + string arg = $"exec {options} {containerId} {command}".Trim(); + context.Command($"{DockerPath} {arg}"); + + object outputLock = new object(); + var processInvoker = HostContext.CreateService(); + processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + if (!string.IsNullOrEmpty(message.Data)) + { + lock (outputLock) + { + output.Add(message.Data); + } + } + }; + + processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + if (!string.IsNullOrEmpty(message.Data)) + { + lock (outputLock) + { + output.Add(message.Data); + } + } + }; + +#if OS_WINDOWS || OS_OSX + throw new NotSupportedException($"Container operation is only supported on Linux"); +#else + return await processInvoker.ExecuteAsync( + workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Work), + fileName: DockerPath, + arguments: arg, + environment: null, + requireExitCodeZero: false, + outputEncoding: null, + cancellationToken: CancellationToken.None); +#endif + } + + public async Task> DockerInspect(IExecutionContext context, string dockerObject, string options) + { + return await ExecuteDockerCommandAsync(context, "inspect", $"{options} {dockerObject}"); + } + + 
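+        // For a service container, the options assembled in DockerCreate above collapse into a single
+        // command line along these lines (name, label, network id, image and paths are hypothetical):
+        //
+        //   docker create --name redis_redis_5_1a2b3c --label 9f3c2e \
+        //     --network github_network_e3b0c44298fc --network-alias redis -p 6379 \
+        //     -e "REDIS_PASSWORD=example" -e GITHUB_ACTIONS=true \
+        //     -v "/mnt/cache":"/cache":ro redis:5
+        //
+        // i.e. container name, instance label, network and alias, user ports, user create options,
+        // environment (with embedded double quotes escaped), volumes, an optional --entrypoint,
+        // then the IMAGE and any entrypoint args last.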
public async Task> DockerPort(IExecutionContext context, string containerId) + { + List portMappingLines = await ExecuteDockerCommandAsync(context, "port", containerId); + return DockerUtil.ParseDockerPort(portMappingLines); + } + + private Task ExecuteDockerCommandAsync(IExecutionContext context, string command, string options, CancellationToken cancellationToken = default(CancellationToken)) + { + return ExecuteDockerCommandAsync(context, command, options, null, cancellationToken); + } + + private async Task ExecuteDockerCommandAsync(IExecutionContext context, string command, string options, IDictionary environment, EventHandler stdoutDataReceived, EventHandler stderrDataReceived, CancellationToken cancellationToken = default(CancellationToken)) + { + string arg = $"{command} {options}".Trim(); + context.Command($"{DockerPath} {arg}"); + + var processInvoker = HostContext.CreateService(); + processInvoker.OutputDataReceived += stdoutDataReceived; + processInvoker.ErrorDataReceived += stderrDataReceived; + + +#if OS_WINDOWS || OS_OSX + throw new NotSupportedException($"Container operation is only supported on Linux"); +#else + return await processInvoker.ExecuteAsync( + workingDirectory: context.GetGitHubContext("workspace"), + fileName: DockerPath, + arguments: arg, + environment: environment, + requireExitCodeZero: false, + outputEncoding: null, + killProcessOnCancel: false, + cancellationToken: cancellationToken); +#endif + } + + private async Task ExecuteDockerCommandAsync(IExecutionContext context, string command, string options, string workingDirectory, CancellationToken cancellationToken = default(CancellationToken)) + { + string arg = $"{command} {options}".Trim(); + context.Command($"{DockerPath} {arg}"); + + var processInvoker = HostContext.CreateService(); + processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + context.Output(message.Data); + }; + + processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + context.Output(message.Data); + }; + +#if OS_WINDOWS || OS_OSX + throw new NotSupportedException($"Container operation is only supported on Linux"); +#else + return await processInvoker.ExecuteAsync( + workingDirectory: workingDirectory ?? 
context.GetGitHubContext("workspace"), + fileName: DockerPath, + arguments: arg, + environment: null, + requireExitCodeZero: false, + outputEncoding: null, + killProcessOnCancel: false, + redirectStandardIn: null, + cancellationToken: cancellationToken); +#endif + } + + private async Task> ExecuteDockerCommandAsync(IExecutionContext context, string command, string options) + { + string arg = $"{command} {options}".Trim(); + context.Command($"{DockerPath} {arg}"); + + List output = new List(); + var processInvoker = HostContext.CreateService(); + processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + if (!string.IsNullOrEmpty(message.Data)) + { + output.Add(message.Data); + context.Output(message.Data); + } + }; + + processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + if (!string.IsNullOrEmpty(message.Data)) + { + context.Output(message.Data); + } + }; + + await processInvoker.ExecuteAsync( + workingDirectory: context.GetGitHubContext("workspace"), + fileName: DockerPath, + arguments: arg, + environment: null, + requireExitCodeZero: true, + outputEncoding: null, + cancellationToken: CancellationToken.None); + + return output; + } + } +} diff --git a/src/Runner.Worker/Container/DockerUtil.cs b/src/Runner.Worker/Container/DockerUtil.cs new file mode 100644 index 00000000000..638a89cce51 --- /dev/null +++ b/src/Runner.Worker/Container/DockerUtil.cs @@ -0,0 +1,49 @@ +using System; +using System.Collections.Generic; +using System.Text.RegularExpressions; + +namespace GitHub.Runner.Worker.Container +{ + public class DockerUtil + { + public static List ParseDockerPort(IList portMappingLines) + { + const string targetPort = "targetPort"; + const string proto = "proto"; + const string host = "host"; + const string hostPort = "hostPort"; + + //"TARGET_PORT/PROTO -> HOST:HOST_PORT" + string pattern = $"^(?<{targetPort}>\\d+)/(?<{proto}>\\w+) -> (?<{host}>.+):(?<{hostPort}>\\d+)$"; + + List portMappings = new List(); + foreach(var line in portMappingLines) + { + Match m = Regex.Match(line, pattern, RegexOptions.None, TimeSpan.FromSeconds(1)); + if (m.Success) + { + portMappings.Add(new PortMapping( + m.Groups[hostPort].Value, + m.Groups[targetPort].Value, + m.Groups[proto].Value + )); + } + } + return portMappings; + } + + public static string ParsePathFromConfigEnv(IList configEnvLines) + { + // Config format is VAR=value per line + foreach (var line in configEnvLines) + { + var keyValue = line.Split("=", 2); + if (keyValue.Length == 2 && string.Equals(keyValue[0], "PATH")) + { + return keyValue[1]; + } + } + return ""; + } + } +} diff --git a/src/Runner.Worker/ContainerOperationProvider.cs b/src/Runner.Worker/ContainerOperationProvider.cs new file mode 100644 index 00000000000..ab7278c6599 --- /dev/null +++ b/src/Runner.Worker/ContainerOperationProvider.cs @@ -0,0 +1,414 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.ServiceProcess; +using System.Threading.Tasks; +using System.Linq; +using System.Threading; +using GitHub.Runner.Worker.Container; +using GitHub.Services.Common; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using GitHub.DistributedTask.Pipelines.ContextData; +using Microsoft.Win32; + +namespace GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(ContainerOperationProvider))] + public interface IContainerOperationProvider : IRunnerService + { + Task StartContainersAsync(IExecutionContext executionContext, object data); + Task 
StopContainersAsync(IExecutionContext executionContext, object data); + } + + public class ContainerOperationProvider : RunnerService, IContainerOperationProvider + { + private IDockerCommandManager _dockerManger; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + _dockerManger = HostContext.GetService(); + } + + public async Task StartContainersAsync(IExecutionContext executionContext, object data) + { + Trace.Entering(); + ArgUtil.NotNull(executionContext, nameof(executionContext)); + List containers = data as List; + ArgUtil.NotNull(containers, nameof(containers)); + + // Check whether we are inside a container. + // Our container feature requires to map working directory from host to the container. + // If we are already inside a container, we will not able to find out the real working direcotry path on the host. +#if OS_WINDOWS + // service CExecSvc is Container Execution Agent. + ServiceController[] scServices = ServiceController.GetServices(); + if (scServices.Any(x => String.Equals(x.ServiceName, "cexecsvc", StringComparison.OrdinalIgnoreCase) && x.Status == ServiceControllerStatus.Running)) + { + throw new NotSupportedException("Container feature is not supported when runner is already running inside container."); + } +#elif OS_RHEL6 + // Red Hat and CentOS 6 do not support the container feature + throw new NotSupportedException("Runner does not support the container feature on Red Hat Enterprise Linux 6 or CentOS 6."); +#else + var initProcessCgroup = File.ReadLines("/proc/1/cgroup"); + if (initProcessCgroup.Any(x => x.IndexOf(":/docker/", StringComparison.OrdinalIgnoreCase) >= 0)) + { + throw new NotSupportedException("Container feature is not supported when runner is already running inside container."); + } +#endif + +#if OS_WINDOWS + // Check OS version (Windows server 1803 is required) + object windowsInstallationType = Registry.GetValue(@"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion", "InstallationType", defaultValue: null); + ArgUtil.NotNull(windowsInstallationType, nameof(windowsInstallationType)); + object windowsReleaseId = Registry.GetValue(@"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion", "ReleaseId", defaultValue: null); + ArgUtil.NotNull(windowsReleaseId, nameof(windowsReleaseId)); + executionContext.Debug($"Current Windows version: '{windowsReleaseId} ({windowsInstallationType})'"); + + if (int.TryParse(windowsReleaseId.ToString(), out int releaseId)) + { + if (!windowsInstallationType.ToString().StartsWith("Server", StringComparison.OrdinalIgnoreCase) || releaseId < 1803) + { + throw new NotSupportedException("Container feature requires Windows Server 1803 or higher."); + } + } + else + { + throw new ArgumentOutOfRangeException(@"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion\ReleaseId"); + } +#endif + + // Check docker client/server version + DockerVersion dockerVersion = await _dockerManger.DockerVersion(executionContext); + ArgUtil.NotNull(dockerVersion.ServerVersion, nameof(dockerVersion.ServerVersion)); + ArgUtil.NotNull(dockerVersion.ClientVersion, nameof(dockerVersion.ClientVersion)); + +#if OS_WINDOWS + Version requiredDockerEngineAPIVersion = new Version(1, 30); // Docker-EE version 17.6 +#else + Version requiredDockerEngineAPIVersion = new Version(1, 35); // Docker-CE version 17.12 +#endif + + if (dockerVersion.ServerVersion < requiredDockerEngineAPIVersion) + { + throw new NotSupportedException($"Min required docker engine API server version is 
'{requiredDockerEngineAPIVersion}', your docker ('{_dockerManger.DockerPath}') server version is '{dockerVersion.ServerVersion}'"); + } + if (dockerVersion.ClientVersion < requiredDockerEngineAPIVersion) + { + throw new NotSupportedException($"Min required docker engine API client version is '{requiredDockerEngineAPIVersion}', your docker ('{_dockerManger.DockerPath}') client version is '{dockerVersion.ClientVersion}'"); + } + + // Clean up containers left by previous runs + executionContext.Debug($"Delete stale containers from previous jobs"); + var staleContainers = await _dockerManger.DockerPS(executionContext, $"--all --quiet --no-trunc --filter \"label={_dockerManger.DockerInstanceLabel}\""); + foreach (var staleContainer in staleContainers) + { + int containerRemoveExitCode = await _dockerManger.DockerRemove(executionContext, staleContainer); + if (containerRemoveExitCode != 0) + { + executionContext.Warning($"Delete stale containers failed, docker rm fail with exit code {containerRemoveExitCode} for container {staleContainer}"); + } + } + + executionContext.Debug($"Delete stale container networks from previous jobs"); + int networkPruneExitCode = await _dockerManger.DockerNetworkPrune(executionContext); + if (networkPruneExitCode != 0) + { + executionContext.Warning($"Delete stale container networks failed, docker network prune fail with exit code {networkPruneExitCode}"); + } + + // Create local docker network for this job to avoid port conflict when multiple agents run on same machine. + // All containers within a job join the same network + var containerNetwork = $"github_network_{Guid.NewGuid().ToString("N")}"; + await CreateContainerNetworkAsync(executionContext, containerNetwork); + executionContext.JobContext.Container["network"] = new StringContextData(containerNetwork); + + foreach (var container in containers) + { + container.ContainerNetwork = containerNetwork; + await StartContainerAsync(executionContext, container); + } + + foreach (var container in containers.Where(c => !c.IsJobContainer)) + { + await ContainerHealthcheck(executionContext, container); + } + } + + public async Task StopContainersAsync(IExecutionContext executionContext, object data) + { + Trace.Entering(); + ArgUtil.NotNull(executionContext, nameof(executionContext)); + + List containers = data as List; + ArgUtil.NotNull(containers, nameof(containers)); + + foreach (var container in containers) + { + await StopContainerAsync(executionContext, container); + } + // Remove the container network + await RemoveContainerNetworkAsync(executionContext, containers.First().ContainerNetwork); + } + + private async Task StartContainerAsync(IExecutionContext executionContext, ContainerInfo container) + { + Trace.Entering(); + ArgUtil.NotNull(executionContext, nameof(executionContext)); + ArgUtil.NotNull(container, nameof(container)); + ArgUtil.NotNullOrEmpty(container.ContainerImage, nameof(container.ContainerImage)); + + Trace.Info($"Container name: {container.ContainerName}"); + Trace.Info($"Container image: {container.ContainerImage}"); + Trace.Info($"Container options: {container.ContainerCreateOptions}"); + foreach (var port in container.UserPortMappings) + { + Trace.Info($"User provided port: {port.Value}"); + } + foreach (var volume in container.UserMountVolumes) + { + Trace.Info($"User provided volume: {volume.Value}"); + } + + // Pull down docker image with retry up to 3 times + int retryCount = 0; + int pullExitCode = 0; + while (retryCount < 3) + { + pullExitCode = await 
_dockerManger.DockerPull(executionContext, container.ContainerImage); + if (pullExitCode == 0) + { + break; + } + else + { + retryCount++; + if (retryCount < 3) + { + var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(10)); + executionContext.Warning($"Docker pull failed with exit code {pullExitCode}, back off {backOff.TotalSeconds} seconds before retry."); + await Task.Delay(backOff); + } + } + } + + if (retryCount == 3 && pullExitCode != 0) + { + throw new InvalidOperationException($"Docker pull failed with exit code {pullExitCode}"); + } + + if (container.IsJobContainer) + { + // Configure job container - Mount workspace and tools, set up environment, and start long running process + var githubContext = executionContext.ExpressionValues["github"] as GitHubContext; + ArgUtil.NotNull(githubContext, nameof(githubContext)); + var workingDirectory = githubContext["workspace"] as StringContextData; + ArgUtil.NotNullOrEmpty(workingDirectory, nameof(workingDirectory)); + container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Work), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Work)))); +#if OS_WINDOWS + container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Externals), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Externals)))); +#else + container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Externals), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Externals)), true)); +#endif + container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Temp), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Temp)))); + container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Actions), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Actions)))); + container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Tools), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Tools)))); + + var tempHomeDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Temp), "_github_home"); + Directory.CreateDirectory(tempHomeDirectory); + container.MountVolumes.Add(new MountVolume(tempHomeDirectory, "/github/home")); + container.AddPathTranslateMapping(tempHomeDirectory, "/github/home"); + container.ContainerEnvironmentVariables["HOME"] = container.TranslateToContainerPath(tempHomeDirectory); + + var tempWorkflowDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Temp), "_github_workflow"); + Directory.CreateDirectory(tempWorkflowDirectory); + container.MountVolumes.Add(new MountVolume(tempWorkflowDirectory, "/github/workflow")); + container.AddPathTranslateMapping(tempWorkflowDirectory, "/github/workflow"); + + container.ContainerWorkDirectory = container.TranslateToContainerPath(workingDirectory); + container.ContainerEntryPoint = "tail"; + container.ContainerEntryPointArgs = "\"-f\" \"/dev/null\""; + } + + container.ContainerId = await _dockerManger.DockerCreate(executionContext, container); + ArgUtil.NotNullOrEmpty(container.ContainerId, nameof(container.ContainerId)); + + // Start container + int startExitCode = await _dockerManger.DockerStart(executionContext, container.ContainerId); + if (startExitCode != 0) + { + throw new InvalidOperationException($"Docker start fail with exit code 
{startExitCode}"); + } + + try + { + // Make sure container is up and running + var psOutputs = await _dockerManger.DockerPS(executionContext, $"--all --filter id={container.ContainerId} --filter status=running --no-trunc --format \"{{{{.ID}}}} {{{{.Status}}}}\""); + if (psOutputs.FirstOrDefault(x => !string.IsNullOrEmpty(x))?.StartsWith(container.ContainerId) != true) + { + // container is not up and running, pull docker log for this container. + await _dockerManger.DockerPS(executionContext, $"--all --filter id={container.ContainerId} --no-trunc --format \"{{{{.ID}}}} {{{{.Status}}}}\""); + int logsExitCode = await _dockerManger.DockerLogs(executionContext, container.ContainerId); + if (logsExitCode != 0) + { + executionContext.Warning($"Docker logs fail with exit code {logsExitCode}"); + } + + executionContext.Warning($"Docker container {container.ContainerId} is not in running state."); + } + } + catch (Exception ex) + { + // pull container log is best effort. + Trace.Error("Catch exception when check container log and container status."); + Trace.Error(ex); + } + + // Gather runtime container information + if (!container.IsJobContainer) + { + var service = new DictionaryContextData() + { + ["id"] = new StringContextData(container.ContainerId), + ["ports"] = new DictionaryContextData(), + ["network"] = new StringContextData(container.ContainerNetwork) + }; + container.AddPortMappings(await _dockerManger.DockerPort(executionContext, container.ContainerId)); + foreach (var port in container.PortMappings) + { + (service["ports"] as DictionaryContextData)[port.ContainerPort] = new StringContextData(port.HostPort); + } + executionContext.JobContext.Services[container.ContainerNetworkAlias] = service; + } + else + { + var configEnvFormat = "--format \"{{range .Config.Env}}{{println .}}{{end}}\""; + var containerEnv = await _dockerManger.DockerInspect(executionContext, container.ContainerId, configEnvFormat); + container.ContainerRuntimePath = DockerUtil.ParsePathFromConfigEnv(containerEnv); + executionContext.JobContext.Container["id"] = new StringContextData(container.ContainerId); + } + } + + private async Task StopContainerAsync(IExecutionContext executionContext, ContainerInfo container) + { + Trace.Entering(); + ArgUtil.NotNull(executionContext, nameof(executionContext)); + ArgUtil.NotNull(container, nameof(container)); + + if (!string.IsNullOrEmpty(container.ContainerId)) + { + executionContext.Output($"Stop and remove container: {container.ContainerDisplayName}"); + + int rmExitCode = await _dockerManger.DockerRemove(executionContext, container.ContainerId); + if (rmExitCode != 0) + { + executionContext.Warning($"Docker rm fail with exit code {rmExitCode}"); + } + } + } + +#if !OS_WINDOWS + private async Task> ExecuteCommandAsync(IExecutionContext context, string command, string arg) + { + context.Command($"{command} {arg}"); + + List outputs = new List(); + object outputLock = new object(); + var processInvoker = HostContext.CreateService(); + processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + if (!string.IsNullOrEmpty(message.Data)) + { + lock (outputLock) + { + outputs.Add(message.Data); + } + } + }; + + processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message) + { + if (!string.IsNullOrEmpty(message.Data)) + { + lock (outputLock) + { + outputs.Add(message.Data); + } + } + }; + + await processInvoker.ExecuteAsync( + workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Work), + 
fileName: command, + arguments: arg, + environment: null, + requireExitCodeZero: true, + outputEncoding: null, + cancellationToken: CancellationToken.None); + + foreach (var outputLine in outputs) + { + context.Output(outputLine); + } + + return outputs; + } +#endif + + private async Task CreateContainerNetworkAsync(IExecutionContext executionContext, string network) + { + Trace.Entering(); + ArgUtil.NotNull(executionContext, nameof(executionContext)); + int networkExitCode = await _dockerManger.DockerNetworkCreate(executionContext, network); + if (networkExitCode != 0) + { + throw new InvalidOperationException($"Docker network create failed with exit code {networkExitCode}"); + } + } + + private async Task RemoveContainerNetworkAsync(IExecutionContext executionContext, string network) + { + Trace.Entering(); + ArgUtil.NotNull(executionContext, nameof(executionContext)); + ArgUtil.NotNull(network, nameof(network)); + + executionContext.Output($"Remove container network: {network}"); + + int removeExitCode = await _dockerManger.DockerNetworkRemove(executionContext, network); + if (removeExitCode != 0) + { + executionContext.Warning($"Docker network rm failed with exit code {removeExitCode}"); + } + } + + private async Task ContainerHealthcheck(IExecutionContext executionContext, ContainerInfo container) + { + string healthCheck = "--format=\"{{if .Config.Healthcheck}}{{print .State.Health.Status}}{{end}}\""; + string serviceHealth = (await _dockerManger.DockerInspect(context: executionContext, dockerObject: container.ContainerId, options: healthCheck)).FirstOrDefault(); + if (string.IsNullOrEmpty(serviceHealth)) + { + // Container has no HEALTHCHECK + return; + } + var retryCount = 0; + while (string.Equals(serviceHealth, "starting", StringComparison.OrdinalIgnoreCase)) + { + TimeSpan backoff = BackoffTimerHelper.GetExponentialBackoff(retryCount, TimeSpan.FromSeconds(2), TimeSpan.FromSeconds(32), TimeSpan.FromSeconds(2)); + executionContext.Output($"{container.ContainerNetworkAlias} service is starting, waiting {backoff.Seconds} seconds before checking again."); + await Task.Delay(backoff, executionContext.CancellationToken); + serviceHealth = (await _dockerManger.DockerInspect(context: executionContext, dockerObject: container.ContainerId, options: healthCheck)).FirstOrDefault(); + retryCount++; + } + if (string.Equals(serviceHealth, "healthy", StringComparison.OrdinalIgnoreCase)) + { + executionContext.Output($"{container.ContainerNetworkAlias} service is healthy."); + } + else + { + throw new InvalidOperationException($"Failed to initialize, {container.ContainerNetworkAlias} service is {serviceHealth}."); + } + } + } +} diff --git a/src/Runner.Worker/DiagnosticLogManager.cs b/src/Runner.Worker/DiagnosticLogManager.cs new file mode 100644 index 00000000000..58df7e1def1 --- /dev/null +++ b/src/Runner.Worker/DiagnosticLogManager.cs @@ -0,0 +1,209 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.IO.Compression; +using System.Text; +using System.Runtime.InteropServices; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Worker; +using GitHub.Runner.Common.Capabilities; +using GitHub.Services.WebApi; +using Microsoft.Win32; +using System.Diagnostics; +using System.Linq; +using System.Collections.ObjectModel; +using System.Globalization; +using System.Threading; +using System.Threading.Tasks; +using Pipelines = GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace 
GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(DiagnosticLogManager))] + public interface IDiagnosticLogManager : IRunnerService + { + Task UploadDiagnosticLogsAsync(IExecutionContext executionContext, + IExecutionContext parentContext, + Pipelines.AgentJobRequestMessage message, + DateTime jobStartTimeUtc); + } + + // This class manages gathering data for support logs, zipping the data, and uploading it. + // The files are created with the following folder structure: + // ..\_layout\_work\_temp + // \[job name]-support (supportRootFolder) + // \files (supportFolder) + // ... + // support.zip + public sealed class DiagnosticLogManager : RunnerService, IDiagnosticLogManager + { + private static string DateTimeFormat = "yyyyMMdd-HHmmss"; + public async Task UploadDiagnosticLogsAsync(IExecutionContext executionContext, + IExecutionContext parentContext, + Pipelines.AgentJobRequestMessage message, + DateTime jobStartTimeUtc) + { + executionContext.Debug("Starting diagnostic file upload."); + + // Setup folders + // \_layout\_work\_temp\[jobname-support] + executionContext.Debug("Setting up diagnostic log folders."); + string tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp); + ArgUtil.Directory(tempDirectory, nameof(tempDirectory)); + + string supportRootFolder = Path.Combine(tempDirectory, message.JobName + "-support"); + Directory.CreateDirectory(supportRootFolder); + + // \_layout\_work\_temp\[jobname-support]\files + executionContext.Debug("Creating diagnostic log files folder."); + string supportFilesFolder = Path.Combine(supportRootFolder, "files"); + Directory.CreateDirectory(supportFilesFolder); + + // Create the environment file + // \_layout\_work\_temp\[jobname-support]\files\environment.txt + var configurationStore = HostContext.GetService(); + RunnerSettings settings = configurationStore.GetSettings(); + int runnerId = settings.AgentId; + string runnerName = settings.AgentName; + int poolId = settings.PoolId; + + // Copy worker diagnostic log files + List workerDiagnosticLogFiles = GetWorkerDiagnosticLogFiles(HostContext.GetDirectory(WellKnownDirectory.Diag), jobStartTimeUtc); + executionContext.Debug($"Copying {workerDiagnosticLogFiles.Count()} worker diagnostic logs."); + + foreach (string workerLogFile in workerDiagnosticLogFiles) + { + ArgUtil.File(workerLogFile, nameof(workerLogFile)); + + string destination = Path.Combine(supportFilesFolder, Path.GetFileName(workerLogFile)); + File.Copy(workerLogFile, destination); + } + + // Copy runner diag log files + List runnerDiagnosticLogFiles = GetRunnerDiagnosticLogFiles(HostContext.GetDirectory(WellKnownDirectory.Diag), jobStartTimeUtc); + executionContext.Debug($"Copying {runnerDiagnosticLogFiles.Count()} runner diagnostic logs."); + + foreach (string runnerLogFile in runnerDiagnosticLogFiles) + { + ArgUtil.File(runnerLogFile, nameof(runnerLogFile)); + + string destination = Path.Combine(supportFilesFolder, Path.GetFileName(runnerLogFile)); + File.Copy(runnerLogFile, destination); + } + + executionContext.Debug("Zipping diagnostic files."); + + string buildNumber = executionContext.Variables.Build_Number ?? "UnknownBuildNumber"; + string buildName = $"Build {buildNumber}"; + string phaseName = executionContext.Variables.System_PhaseDisplayName ?? 
"UnknownPhaseName"; + + // zip the files + string diagnosticsZipFileName = $"{buildName}-{phaseName}.zip"; + string diagnosticsZipFilePath = Path.Combine(supportRootFolder, diagnosticsZipFileName); + ZipFile.CreateFromDirectory(supportFilesFolder, diagnosticsZipFilePath); + + // upload the json metadata file + executionContext.Debug("Uploading diagnostic metadata file."); + string metadataFileName = $"diagnostics-{buildName}-{phaseName}.json"; + string metadataFilePath = Path.Combine(supportFilesFolder, metadataFileName); + string phaseResult = GetTaskResultAsString(executionContext.Result); + + IOUtil.SaveObject(new DiagnosticLogMetadata(runnerName, runnerId, poolId, phaseName, diagnosticsZipFileName, phaseResult), metadataFilePath); + + // TODO: Remove the parentContext Parameter and replace this with executioncontext. Currently a bug exists where these files do not upload correctly using that context. + parentContext.QueueAttachFile(type: CoreAttachmentType.DiagnosticLog, name: metadataFileName, filePath: metadataFilePath); + + parentContext.QueueAttachFile(type: CoreAttachmentType.DiagnosticLog, name: diagnosticsZipFileName, filePath: diagnosticsZipFilePath); + + executionContext.Debug("Diagnostic file upload complete."); + } + + private string GetTaskResultAsString(TaskResult? taskResult) + { + if (!taskResult.HasValue) { return "Unknown"; } + + return taskResult.ToString(); + } + + // The current solution is a hack. We need to rethink this and find a better one. + // The list of worker log files isn't available from the logger. It's also nested several levels deep. + // For this solution we deduce the applicable worker log files by comparing their create time to the start time of the job. + private List GetWorkerDiagnosticLogFiles(string diagnosticFolder, DateTime jobStartTimeUtc) + { + // Get all worker log files with a timestamp equal or greater than the start of the job + var workerLogFiles = new List(); + var directoryInfo = new DirectoryInfo(diagnosticFolder); + + // Sometimes the timing is off between the job start time and the time the worker log file is created. + // This adds a small buffer that provides some leeway in case the worker log file was created slightly + // before the time we log as job start time. + int bufferInSeconds = -30; + DateTime searchTimeUtc = jobStartTimeUtc.AddSeconds(bufferInSeconds); + + foreach (FileInfo file in directoryInfo.GetFiles().Where(f => f.Name.StartsWith(Constants.Path.WorkerDiagnosticLogPrefix))) + { + // The format of the logs is: + // Worker_20171003-143110-utc.log + DateTime fileCreateTime = DateTime.ParseExact( + s: file.Name.Substring(startIndex: Constants.Path.WorkerDiagnosticLogPrefix.Length, length: DateTimeFormat.Length), + format: DateTimeFormat, + provider: CultureInfo.InvariantCulture); + + if (fileCreateTime >= searchTimeUtc) + { + workerLogFiles.Add(file.FullName); + } + } + + return workerLogFiles; + } + + private List GetRunnerDiagnosticLogFiles(string diagnosticFolder, DateTime jobStartTimeUtc) + { + // Get the newest runner log file that created just before the start of the job + var runnerLogFiles = new List(); + var directoryInfo = new DirectoryInfo(diagnosticFolder); + + // The runner log that record the start point of the job should created before the job start time. + // The runner log may get paged if it reach size limit. + // We will only need upload 1 runner log file in 99%. + // There might be 1% we need to upload 2 runner log files. 
+ String recentLog = null; + DateTime recentTimeUtc = DateTime.MinValue; + + foreach (FileInfo file in directoryInfo.GetFiles().Where(f => f.Name.StartsWith(Constants.Path.RunnerDiagnosticLogPrefix))) + { + // The format of the logs is: + // Runner_20171003-143110-utc.log + if (DateTime.TryParseExact( + s: file.Name.Substring(startIndex: Constants.Path.RunnerDiagnosticLogPrefix.Length, length: DateTimeFormat.Length), + format: DateTimeFormat, + provider: CultureInfo.InvariantCulture, + style: DateTimeStyles.None, + result: out DateTime fileCreateTime)) + { + // always add log file created after the job start. + if (fileCreateTime >= jobStartTimeUtc) + { + runnerLogFiles.Add(file.FullName); + } + else if (fileCreateTime > recentTimeUtc) + { + recentLog = file.FullName; + recentTimeUtc = fileCreateTime; + } + } + } + + if (!String.IsNullOrEmpty(recentLog)) + { + runnerLogFiles.Add(recentLog); + } + + return runnerLogFiles; + } + } +} diff --git a/src/Runner.Worker/ExecutionContext.cs b/src/Runner.Worker/ExecutionContext.cs new file mode 100644 index 00000000000..2da42fa867c --- /dev/null +++ b/src/Runner.Worker/ExecutionContext.cs @@ -0,0 +1,1025 @@ +using System; +using System.Collections.Generic; +using System.Collections.Specialized; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using System.Web; +using GitHub.Runner.Worker.Container; +using GitHub.Services.WebApi; +using GitHub.DistributedTask.Pipelines; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.WebApi; +using Pipelines = GitHub.DistributedTask.Pipelines; +using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using Newtonsoft.Json; +using System.Text; +using System.Collections; + +namespace GitHub.Runner.Worker +{ + public class ExecutionContextType + { + public static string Job = "Job"; + public static string Task = "Task"; + } + + [ServiceLocator(Default = typeof(ExecutionContext))] + public interface IExecutionContext : IRunnerService + { + Guid Id { get; } + string ScopeName { get; } + string ContextName { get; } + Task ForceCompleted { get; } + TaskResult? Result { get; set; } + string ResultCode { get; set; } + TaskResult? CommandResult { get; set; } + CancellationToken CancellationToken { get; } + List Endpoints { get; } + List SecureFiles { get; } + + PlanFeatures Features { get; } + Variables Variables { get; } + Dictionary IntraActionState { get; } + HashSet OutputVariables { get; } + IDictionary EnvironmentVariables { get; } + IDictionary Scopes { get; } + StepsContext StepsContext { get; } + DictionaryContextData ExpressionValues { get; } + List PrependPath { get; } + ContainerInfo Container { get; set; } + List ServiceContainers { get; } + JobContext JobContext { get; } + + // Only job level ExecutionContext has JobSteps + Queue JobSteps { get; } + + // Only job level ExecutionContext has PostJobSteps + Stack PostJobSteps { get; } + + // Initialize + void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token); + void CancelToken(); + IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary intraActionState = null, int? 
recordOrder = null); + + // logging + bool WriteDebug { get; } + long Write(string tag, string message); + void QueueAttachFile(string type, string name, string filePath); + + // timeline record update methods + void Start(string currentOperation = null); + TaskResult Complete(TaskResult? result = null, string currentOperation = null, string resultCode = null); + void SetEnvContext(string name, string value); + void SetRunnerContext(string name, string value); + string GetGitHubContext(string name); + void SetGitHubContext(string name, string value); + void SetOutput(string name, string value, out string reference); + void SetTimeout(TimeSpan? timeout); + void AddIssue(Issue issue, string message = null); + void Progress(int percentage, string currentOperation = null); + void UpdateDetailTimelineRecord(TimelineRecord record); + + void UpdateTimelineRecordDisplayName(string displayName); + + // matchers + void Add(OnMatcherChanged handler); + void Remove(OnMatcherChanged handler); + void AddMatchers(IssueMatchersConfig matcher); + void RemoveMatchers(IEnumerable owners); + IEnumerable GetMatchers(); + + // others + void ForceTaskComplete(); + void RegisterPostJobAction(string displayName, string condition, Pipelines.ActionStep action); + } + + public sealed class ExecutionContext : RunnerService, IExecutionContext + { + private const int _maxIssueCount = 10; + + private readonly TimelineRecord _record = new TimelineRecord(); + private readonly Dictionary _detailRecords = new Dictionary(); + private readonly object _loggerLock = new object(); + private readonly HashSet _outputvariables = new HashSet(StringComparer.OrdinalIgnoreCase); + private readonly object _matchersLock = new object(); + + private event OnMatcherChanged _onMatcherChanged; + + private IssueMatcherConfig[] _matchers; + + private IPagingLogger _logger; + private IJobServerQueue _jobServerQueue; + private ExecutionContext _parentExecutionContext; + + private Guid _mainTimelineId; + private Guid _detailTimelineId; + private bool _expandedForPostJob = false; + private int _childTimelineRecordOrder = 0; + private CancellationTokenSource _cancellationTokenSource; + private TaskCompletionSource _forceCompleted = new TaskCompletionSource(); + private bool _throttlingReported = false; + + // only job level ExecutionContext will track throttling delay. 
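The `IExecutionContext` interface above is the surface that steps and handlers program against. A minimal, hypothetical consumer is sketched below; the step type and the work it performs are made up, and `Output`, `Debug`, and `Warning` are the extension methods defined at the end of this file:

```csharp
using System;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker;

// Hypothetical step, for illustration only: drives the timeline record through
// Start/Complete, writes log lines, and publishes a step output.
public sealed class GreetingStep
{
    public async Task RunAsync(IExecutionContext context)
    {
        context.Start();                                      // timeline record -> InProgress
        try
        {
            context.Output("Hello from a step.");             // regular log line
            context.Debug("Only shown when ACTIONS_STEP_DEBUG is set.");
            context.SetOutput("greeting", "hello", out string reference);
            if (reference == null)
            {
                context.Warning("Step has no context name; output was not recorded.");
            }

            await Task.Delay(TimeSpan.FromMilliseconds(100), context.CancellationToken);
            context.Complete(TaskResult.Succeeded);           // timeline record -> Completed
        }
        catch (OperationCanceledException)
        {
            context.Complete(TaskResult.Canceled);
            throw;
        }
    }
}
```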
+ private long _totalThrottlingDelayInMilliseconds = 0; + + public Guid Id => _record.Id; + public string ScopeName { get; private set; } + public string ContextName { get; private set; } + public Task ForceCompleted => _forceCompleted.Task; + public CancellationToken CancellationToken => _cancellationTokenSource.Token; + public List Endpoints { get; private set; } + public List SecureFiles { get; private set; } + public Variables Variables { get; private set; } + public Dictionary IntraActionState { get; private set; } + public HashSet OutputVariables => _outputvariables; + public IDictionary EnvironmentVariables { get; private set; } + public IDictionary Scopes { get; private set; } + public StepsContext StepsContext { get; private set; } + public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData(); + public bool WriteDebug { get; private set; } + public List PrependPath { get; private set; } + public ContainerInfo Container { get; set; } + public List ServiceContainers { get; private set; } + + // Only job level ExecutionContext has JobSteps + public Queue JobSteps { get; private set; } + + // Only job level ExecutionContext has PostJobSteps + public Stack PostJobSteps { get; private set; } + + + public TaskResult? Result + { + get + { + return _record.Result; + } + set + { + _record.Result = value; + } + } + + public TaskResult? CommandResult { get; set; } + + private string ContextType => _record.RecordType; + + public string ResultCode + { + get + { + return _record.ResultCode; + } + set + { + _record.ResultCode = value; + } + } + + public PlanFeatures Features { get; private set; } + + private ExecutionContext Root + { + get + { + var result = this; + + while (result._parentExecutionContext != null) + { + result = result._parentExecutionContext; + } + + return result; + } + } + + public JobContext JobContext + { + get + { + return ExpressionValues["job"] as JobContext; + } + } + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + + _jobServerQueue = HostContext.GetService(); + } + + public void CancelToken() + { + try + { + _cancellationTokenSource.Cancel(); + } + catch (ObjectDisposedException e) + { + Trace.Info($"Attempted to cancel a disposed token, the execution is already complete: {e.ToString()}"); + } + } + + public void ForceTaskComplete() + { + Trace.Info("Force finish current task in 5 sec."); + Task.Run(async () => + { + await Task.Delay(TimeSpan.FromSeconds(5)); + _forceCompleted?.TrySetResult(1); + }); + } + + public void RegisterPostJobAction(string displayName, string condition, Pipelines.ActionStep action) + { + if (action.Reference.Type != ActionSourceType.Repository) + { + throw new NotSupportedException("Only action that has `action.yml` can define post job execution."); + } + + var repositoryReference = action.Reference as RepositoryPathReference; + var pathString = string.IsNullOrEmpty(repositoryReference.Path) ? string.Empty : $"/{repositoryReference.Path}"; + var repoString = string.IsNullOrEmpty(repositoryReference.Ref) ? 
$"{repositoryReference.Name}{pathString}" : + $"{repositoryReference.Name}{pathString}@{repositoryReference.Ref}"; + + this.Debug($"Register post job cleanup for action: {repoString}"); + + var actionRunner = HostContext.CreateService(); + actionRunner.Action = action; + actionRunner.Stage = ActionRunStage.Post; + actionRunner.Condition = condition; + actionRunner.DisplayName = displayName; + actionRunner.ExecutionContext = Root.CreatePostChild(displayName, $"{actionRunner.Action.Name}_post", IntraActionState); + Root.PostJobSteps.Push(actionRunner); + } + + public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary intraActionState = null, int? recordOrder = null) + { + Trace.Entering(); + + var child = new ExecutionContext(); + child.Initialize(HostContext); + child.ScopeName = scopeName; + child.ContextName = contextName; + child.Features = Features; + child.Variables = Variables; + child.Endpoints = Endpoints; + child.SecureFiles = SecureFiles; + if (intraActionState == null) + { + child.IntraActionState = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + else + { + child.IntraActionState = intraActionState; + } + child.EnvironmentVariables = EnvironmentVariables; + child.Scopes = Scopes; + child.StepsContext = StepsContext; + foreach (var pair in ExpressionValues) + { + child.ExpressionValues[pair.Key] = pair.Value; + } + child._cancellationTokenSource = new CancellationTokenSource(); + child.WriteDebug = WriteDebug; + child._parentExecutionContext = this; + child.PrependPath = PrependPath; + child.Container = Container; + child.ServiceContainers = ServiceContainers; + + if (recordOrder != null) + { + child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, recordOrder); + } + else + { + child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder); + } + + child._logger = HostContext.CreateService(); + child._logger.Setup(_mainTimelineId, recordId); + + return child; + } + + public void Start(string currentOperation = null) + { + _record.CurrentOperation = currentOperation ?? _record.CurrentOperation; + _record.StartTime = DateTime.UtcNow; + _record.State = TimelineRecordState.InProgress; + + _jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record); + } + + public TaskResult Complete(TaskResult? result = null, string currentOperation = null, string resultCode = null) + { + if (result != null) + { + Result = result; + } + + // report total delay caused by server throttling. + if (_totalThrottlingDelayInMilliseconds > 0) + { + this.Warning($"The job has experienced {TimeSpan.FromMilliseconds(_totalThrottlingDelayInMilliseconds).TotalSeconds} seconds total delay caused by server throttling."); + } + + _record.CurrentOperation = currentOperation ?? _record.CurrentOperation; + _record.ResultCode = resultCode ?? _record.ResultCode; + _record.FinishTime = DateTime.UtcNow; + _record.PercentComplete = 100; + _record.Result = _record.Result ?? TaskResult.Succeeded; + _record.State = TimelineRecordState.Completed; + + _jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record); + + // complete all detail timeline records. + if (_detailTimelineId != Guid.Empty && _detailRecords.Count > 0) + { + foreach (var record in _detailRecords) + { + record.Value.FinishTime = record.Value.FinishTime ?? 
DateTime.UtcNow; + record.Value.PercentComplete = record.Value.PercentComplete ?? 100; + record.Value.Result = record.Value.Result ?? TaskResult.Succeeded; + record.Value.State = TimelineRecordState.Completed; + + _jobServerQueue.QueueTimelineRecordUpdate(_detailTimelineId, record.Value); + } + } + + _cancellationTokenSource?.Dispose(); + + _logger.End(); + + return Result.Value; + } + + public void SetRunnerContext(string name, string value) + { + ArgUtil.NotNullOrEmpty(name, nameof(name)); + var runnerContext = ExpressionValues["runner"] as RunnerContext; + runnerContext[name] = new StringContextData(value); + } + + public void SetEnvContext(string name, string value) + { + ArgUtil.NotNullOrEmpty(name, nameof(name)); + +#if OS_WINDOWS + var envContext = ExpressionValues["env"] as DictionaryContextData; + envContext[name] = new StringContextData(value); +#else + var envContext = ExpressionValues["env"] as CaseSensitiveDictionaryContextData; + envContext[name] = new StringContextData(value); +#endif + + } + + public void SetGitHubContext(string name, string value) + { + ArgUtil.NotNullOrEmpty(name, nameof(name)); + var githubContext = ExpressionValues["github"] as GitHubContext; + githubContext[name] = new StringContextData(value); + } + + public string GetGitHubContext(string name) + { + ArgUtil.NotNullOrEmpty(name, nameof(name)); + var githubContext = ExpressionValues["github"] as GitHubContext; + if (githubContext.TryGetValue(name, out var value)) + { + if (value is StringContextData) + { + return value as StringContextData; + } + else + { + return value.ToJToken().ToString(Formatting.Indented); + } + } + else + { + return null; + } + } + + public void SetOutput(string name, string value, out string reference) + { + ArgUtil.NotNullOrEmpty(name, nameof(name)); + + if (String.IsNullOrEmpty(ContextName)) + { + reference = null; + return; + } + + // todo: restrict multiline? + + StepsContext.SetOutput(ScopeName, ContextName, name, value, out reference); + } + + public void SetTimeout(TimeSpan? timeout) + { + if (timeout != null) + { + _cancellationTokenSource.CancelAfter(timeout.Value); + } + } + + public void Progress(int percentage, string currentOperation = null) + { + if (percentage > 100 || percentage < 0) + { + throw new ArgumentOutOfRangeException(nameof(percentage)); + } + + _record.CurrentOperation = currentOperation ?? 
_record.CurrentOperation; + _record.PercentComplete = Math.Max(percentage, _record.PercentComplete.Value); + + _jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record); + } + + // This is not thread safe, the caller need to take lock before calling issue() + public void AddIssue(Issue issue, string logMessage = null) + { + ArgUtil.NotNull(issue, nameof(issue)); + + if (string.IsNullOrEmpty(logMessage)) + { + logMessage = issue.Message; + } + + issue.Message = HostContext.SecretMasker.MaskSecrets(issue.Message); + + if (issue.Type == IssueType.Error) + { + // tracking line number for each issue in log file + // log UI use this to navigate from issue to log + if (!string.IsNullOrEmpty(logMessage)) + { + long logLineNumber = Write(WellKnownTags.Error, logMessage); + issue.Data["logFileLineNumber"] = logLineNumber.ToString(); + } + + if (_record.ErrorCount < _maxIssueCount) + { + _record.Issues.Add(issue); + } + + _record.ErrorCount++; + } + else if (issue.Type == IssueType.Warning) + { + // tracking line number for each issue in log file + // log UI use this to navigate from issue to log + if (!string.IsNullOrEmpty(logMessage)) + { + long logLineNumber = Write(WellKnownTags.Warning, logMessage); + issue.Data["logFileLineNumber"] = logLineNumber.ToString(); + } + + if (_record.WarningCount < _maxIssueCount) + { + _record.Issues.Add(issue); + } + + _record.WarningCount++; + } + + _jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record); + } + + public void UpdateDetailTimelineRecord(TimelineRecord record) + { + ArgUtil.NotNull(record, nameof(record)); + + if (record.RecordType == ExecutionContextType.Job) + { + throw new ArgumentOutOfRangeException(nameof(record)); + } + + if (_detailTimelineId == Guid.Empty) + { + // create detail timeline + _detailTimelineId = Guid.NewGuid(); + _record.Details = new Timeline(_detailTimelineId); + + _jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record); + } + + TimelineRecord existRecord; + if (_detailRecords.TryGetValue(record.Id, out existRecord)) + { + existRecord.Name = record.Name ?? existRecord.Name; + existRecord.RecordType = record.RecordType ?? existRecord.RecordType; + existRecord.Order = record.Order ?? existRecord.Order; + existRecord.ParentId = record.ParentId ?? existRecord.ParentId; + existRecord.StartTime = record.StartTime ?? existRecord.StartTime; + existRecord.FinishTime = record.FinishTime ?? existRecord.FinishTime; + existRecord.PercentComplete = record.PercentComplete ?? existRecord.PercentComplete; + existRecord.CurrentOperation = record.CurrentOperation ?? existRecord.CurrentOperation; + existRecord.Result = record.Result ?? existRecord.Result; + existRecord.ResultCode = record.ResultCode ?? existRecord.ResultCode; + existRecord.State = record.State ?? 
existRecord.State; + + _jobServerQueue.QueueTimelineRecordUpdate(_detailTimelineId, existRecord); + } + else + { + _detailRecords[record.Id] = record; + _jobServerQueue.QueueTimelineRecordUpdate(_detailTimelineId, record); + } + } + + public void UpdateTimelineRecordDisplayName(string displayName) + { + ArgUtil.NotNull(displayName, nameof(displayName)); + _record.Name = displayName; + _jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record); + } + + public void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token) + { + // Validation + Trace.Entering(); + ArgUtil.NotNull(message, nameof(message)); + ArgUtil.NotNull(message.Resources, nameof(message.Resources)); + ArgUtil.NotNull(message.Variables, nameof(message.Variables)); + ArgUtil.NotNull(message.Plan, nameof(message.Plan)); + + _cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token); + + // Features + Features = PlanUtil.GetFeatures(message.Plan); + + // Endpoints + Endpoints = message.Resources.Endpoints; + + // SecureFiles + SecureFiles = message.Resources.SecureFiles; + + // Variables + Variables = new Variables(HostContext, message.Variables); + + // Environment variables shared across all actions + EnvironmentVariables = new Dictionary(VarUtil.EnvironmentVariableKeyComparer); + + // Service container info + ServiceContainers = new List(); + + // Steps context (StepsRunner manages adding the scoped steps context) + StepsContext = new StepsContext(); + + // Scopes + Scopes = new Dictionary(StringComparer.OrdinalIgnoreCase); + if (message.Scopes?.Count > 0) + { + foreach (var scope in message.Scopes) + { + Scopes[scope.Name] = scope; + } + } + + // Expression values + if (message.ContextData?.Count > 0) + { + foreach (var pair in message.ContextData) + { + ExpressionValues[pair.Key] = pair.Value; + } + } + + ExpressionValues["secrets"] = Variables.ToSecretsContext(); + ExpressionValues["runner"] = new RunnerContext(); + ExpressionValues["job"] = new JobContext(); + + Trace.Info("Initialize GitHub context"); + var githubAccessToken = new StringContextData(Variables.Get("system.github.token")); + var base64EncodedToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{githubAccessToken}")); + HostContext.SecretMasker.AddValue(base64EncodedToken); + var githubContext = new GitHubContext(); + githubContext["token"] = githubAccessToken; + var githubDictionary = ExpressionValues["github"].AssertDictionary("github"); + foreach (var pair in githubDictionary) + { + githubContext[pair.Key] = pair.Value; + } + ExpressionValues["github"] = githubContext; + + Trace.Info("Initialize Env context"); +#if OS_WINDOWS + ExpressionValues["env"] = new DictionaryContextData(); +#else + + ExpressionValues["env"] = new CaseSensitiveDictionaryContextData(); +#endif + + // Prepend Path + PrependPath = new List(); + + // JobSteps for job ExecutionContext + JobSteps = new Queue(); + + // PostJobSteps for job ExecutionContext + PostJobSteps = new Stack(); + // Proxy variables + // var agentWebProxy = HostContext.GetService(); + // if (!string.IsNullOrEmpty(agentWebProxy.ProxyAddress)) + // { + // SetRunnerContext("proxyurl", agentWebProxy.ProxyAddress); + + // if (!string.IsNullOrEmpty(agentWebProxy.ProxyUsername)) + // { + // SetRunnerContext("proxyusername", agentWebProxy.ProxyUsername); + // } + + // if (!string.IsNullOrEmpty(agentWebProxy.ProxyPassword)) + // { + // HostContext.SecretMasker.AddValue(agentWebProxy.ProxyPassword); + // SetRunnerContext("proxypassword", 
agentWebProxy.ProxyPassword); + // } + + // if (agentWebProxy.ProxyBypassList.Count > 0) + // { + // SetRunnerContext("proxybypasslist", JsonUtility.ToString(agentWebProxy.ProxyBypassList)); + // } + // } + + // // Certificate variables + // var agentCert = HostContext.GetService(); + // if (agentCert.SkipServerCertificateValidation) + // { + // SetRunnerContext("sslskipcertvalidation", bool.TrueString); + // } + + // if (!string.IsNullOrEmpty(agentCert.CACertificateFile)) + // { + // SetRunnerContext("sslcainfo", agentCert.CACertificateFile); + // } + + // if (!string.IsNullOrEmpty(agentCert.ClientCertificateFile) && + // !string.IsNullOrEmpty(agentCert.ClientCertificatePrivateKeyFile) && + // !string.IsNullOrEmpty(agentCert.ClientCertificateArchiveFile)) + // { + // SetRunnerContext("clientcertfile", agentCert.ClientCertificateFile); + // SetRunnerContext("clientcertprivatekey", agentCert.ClientCertificatePrivateKeyFile); + // SetRunnerContext("clientcertarchive", agentCert.ClientCertificateArchiveFile); + + // if (!string.IsNullOrEmpty(agentCert.ClientCertificatePassword)) + // { + // HostContext.SecretMasker.AddValue(agentCert.ClientCertificatePassword); + // SetRunnerContext("clientcertpassword", agentCert.ClientCertificatePassword); + // } + // } + + // // Runtime option variables + // var runtimeOptions = HostContext.GetService().GetRunnerRuntimeOptions(); + // if (runtimeOptions != null) + // { + // #if OS_WINDOWS + // if (runtimeOptions.GitUseSecureChannel) + // { + // SetRunnerContext("gituseschannel", runtimeOptions.GitUseSecureChannel.ToString()); + // } + // #endif + // } + + // Job timeline record. + InitializeTimelineRecord( + timelineId: message.Timeline.Id, + timelineRecordId: message.JobId, + parentTimelineRecordId: null, + recordType: ExecutionContextType.Job, + displayName: message.JobDisplayName, + refName: message.JobName, + order: null); // The job timeline record's order is set by server. + + // Logger (must be initialized before writing warnings). + _logger = HostContext.CreateService(); + _logger.Setup(_mainTimelineId, _record.Id); + + // Verbosity (from GitHub.Step_Debug). + WriteDebug = Variables.Step_Debug ?? false; + + // Hook up JobServerQueueThrottling event, we will log warning on server tarpit. + _jobServerQueue.JobServerQueueThrottling += JobServerQueueThrottling_EventReceived; + } + + // Do not add a format string overload. In general, execution context messages are user facing and + // therefore should be localized. Use the Loc methods from the StringUtil class. The exception to + // the rule is command messages - which should be crafted using strongly typed wrapper methods. + public long Write(string tag, string message) + { + string msg = HostContext.SecretMasker.MaskSecrets($"{tag}{message}"); + long totalLines; + lock (_loggerLock) + { + totalLines = _logger.TotalLines + 1; + _logger.Write(msg); + } + + // write to job level execution context's log file. + if (_parentExecutionContext != null) + { + lock (_parentExecutionContext._loggerLock) + { + _parentExecutionContext._logger.Write(msg); + } + } + + _jobServerQueue.QueueWebConsoleLine(_record.Id, msg); + return totalLines; + } + + public void QueueAttachFile(string type, string name, string filePath) + { + ArgUtil.NotNullOrEmpty(type, nameof(type)); + ArgUtil.NotNullOrEmpty(name, nameof(name)); + ArgUtil.NotNullOrEmpty(filePath, nameof(filePath)); + + if (!File.Exists(filePath)) + { + throw new FileNotFoundException($"Can't attach (type:{type} name:{name}) file: {filePath}. 
File does not exist."); + } + + _jobServerQueue.QueueFileUpload(_mainTimelineId, _record.Id, type, name, filePath, deleteSource: false); + } + + // Add OnMatcherChanged + public void Add(OnMatcherChanged handler) + { + Root._onMatcherChanged += handler; + } + + // Remove OnMatcherChanged + public void Remove(OnMatcherChanged handler) + { + Root._onMatcherChanged -= handler; + } + + // Add Issue matchers + public void AddMatchers(IssueMatchersConfig config) + { + var root = Root; + + // Lock + lock (root._matchersLock) + { + var newMatchers = new List(); + + // Prepend + var newOwners = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (var matcher in config.Matchers) + { + newOwners.Add(matcher.Owner); + newMatchers.Add(matcher); + } + + // Add existing non-matching + var existingMatchers = root._matchers ?? Array.Empty(); + newMatchers.AddRange(existingMatchers.Where(x => !newOwners.Contains(x.Owner))); + + // Store + root._matchers = newMatchers.ToArray(); + + // Fire events + foreach (var matcher in config.Matchers) + { + root._onMatcherChanged(null, new MatcherChangedEventArgs(matcher)); + } + + // Output + var owners = config.Matchers.Select(x => $"'{x.Owner}'"); + var joinedOwners = string.Join(", ", owners); + // todo: loc + this.Output($"Added matchers: {joinedOwners}. Problem matchers scan action output for known warning or error strings and report these inline."); + } + } + + // Remove issue matcher + public void RemoveMatchers(IEnumerable owners) + { + var root = Root; + var distinctOwners = new HashSet(owners, StringComparer.OrdinalIgnoreCase); + var removedMatchers = new List(); + var newMatchers = new List(); + + // Lock + lock (root._matchersLock) + { + // Remove + var existingMatchers = root._matchers ?? Array.Empty(); + foreach (var matcher in existingMatchers) + { + if (distinctOwners.Contains(matcher.Owner)) + { + removedMatchers.Add(matcher); + } + else + { + newMatchers.Add(matcher); + } + } + + // Store + root._matchers = newMatchers.ToArray(); + + // Fire events + foreach (var removedMatcher in removedMatchers) + { + root._onMatcherChanged(null, new MatcherChangedEventArgs(new IssueMatcherConfig { Owner = removedMatcher.Owner })); + } + + // Output + owners = removedMatchers.Select(x => $"'{x.Owner}'"); + var joinedOwners = string.Join(", ", owners); + // todo: loc + this.Output($"Removed matchers: {joinedOwners}"); + } + } + + // Get issue matchers + public IEnumerable GetMatchers() + { + // Lock not required since the list is immutable + return Root._matchers ?? Array.Empty(); + } + + private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? 
order) + { + _mainTimelineId = timelineId; + _record.Id = timelineRecordId; + _record.RecordType = recordType; + _record.Name = displayName; + _record.RefName = refName; + _record.Order = order; + _record.PercentComplete = 0; + _record.State = TimelineRecordState.Pending; + _record.ErrorCount = 0; + _record.WarningCount = 0; + + if (parentTimelineRecordId != null && parentTimelineRecordId.Value != Guid.Empty) + { + _record.ParentId = parentTimelineRecordId; + } + + var configuration = HostContext.GetService(); + _record.WorkerName = configuration.GetSettings().AgentName; + + _jobServerQueue.QueueTimelineRecordUpdate(_mainTimelineId, _record); + } + + private void JobServerQueueThrottling_EventReceived(object sender, ThrottlingEventArgs data) + { + Interlocked.Add(ref _totalThrottlingDelayInMilliseconds, Convert.ToInt64(data.Delay.TotalMilliseconds)); + + if (!_throttlingReported) + { + this.Warning(string.Format("The job is currently being throttled by the server. You may experience delays in console line output, job status reporting, and action log uploads.")); + + if (!String.IsNullOrEmpty(this.Variables.System_TFCollectionUrl)) + { + // Construct a URL to the resource utilization page, to aid the user debug throttling issues + UriBuilder uriBuilder = new UriBuilder(Variables.System_TFCollectionUrl); + NameValueCollection query = HttpUtility.ParseQueryString(uriBuilder.Query); + DateTime endTime = DateTime.UtcNow; + string queryDate = endTime.AddHours(-1).ToString("s") + "," + endTime.ToString("s"); + + uriBuilder.Path += (Variables.System_TFCollectionUrl.EndsWith("/") ? "" : "/") + "_usersSettings/usage"; + query["tab"] = "pipelines"; + query["queryDate"] = queryDate; + + // Global RU link + uriBuilder.Query = query.ToString(); + string global = $"Link to resource utilization page (global 1-hour view): {uriBuilder.ToString()}."; + + if (!String.IsNullOrEmpty(this.Variables.Build_DefinitionName)) + { + query["keywords"] = this.Variables.Build_Number; + query["definition"] = this.Variables.Build_DefinitionName; + } + + // RU link scoped for the build/release + uriBuilder.Query = query.ToString(); + this.Warning($"{global}\nLink to resource utilization page (1-hour view by pipeline): {uriBuilder.ToString()}."); + } + + _throttlingReported = true; + } + } + + private IExecutionContext CreatePostChild(string displayName, string refName, Dictionary intraActionState) + { + if (!_expandedForPostJob) + { + Trace.Info($"Reserve record order {_childTimelineRecordOrder + 1} to {_childTimelineRecordOrder * 2} for post job actions."); + _expandedForPostJob = true; + _childTimelineRecordOrder = _childTimelineRecordOrder * 2; + } + + return CreateChild(Guid.NewGuid(), displayName, refName, null, null, intraActionState, _childTimelineRecordOrder - Root.PostJobSteps.Count); + } + } + + // The Error/Warning/etc methods are created as extension methods to simplify unit testing. + // Otherwise individual overloads would need to be implemented (depending on the unit test). + public static class ExecutionContextExtension + { + public static void Error(this IExecutionContext context, Exception ex) + { + context.Error(ex.Message); + context.Debug(ex.ToString()); + } + + // Do not add a format string overload. See comment on ExecutionContext.Write(). + public static void Error(this IExecutionContext context, string message) + { + context.AddIssue(new Issue() { Type = IssueType.Error, Message = message }); + } + + // Do not add a format string overload. See comment on ExecutionContext.Write(). 
+ public static void Warning(this IExecutionContext context, string message) + { + context.AddIssue(new Issue() { Type = IssueType.Warning, Message = message }); + } + + // Do not add a format string overload. See comment on ExecutionContext.Write(). + public static void Output(this IExecutionContext context, string message) + { + context.Write(null, message); + } + + // Do not add a format string overload. See comment on ExecutionContext.Write(). + public static void Command(this IExecutionContext context, string message) + { + context.Write(WellKnownTags.Command, message); + } + + // + // Verbose output is enabled by setting ACTIONS_STEP_DEBUG + // It's meant to help the end user debug their definitions. + // Why are my inputs not working? It's not meant for dev debugging which is diag + // + // Do not add a format string overload. See comment on ExecutionContext.Write(). + public static void Debug(this IExecutionContext context, string message) + { + if (context.WriteDebug) + { + var multilines = message?.Replace("\r\n", "\n")?.Split("\n"); + if (multilines != null) + { + foreach (var line in multilines) + { + context.Write(WellKnownTags.Debug, line); + } + } + } + } + + public static ObjectTemplating.ITraceWriter ToTemplateTraceWriter(this IExecutionContext context) + { + return new TemplateTraceWriter(context); + } + } + + internal sealed class TemplateTraceWriter : ObjectTemplating.ITraceWriter + { + private readonly IExecutionContext _executionContext; + + internal TemplateTraceWriter(IExecutionContext executionContext) + { + _executionContext = executionContext; + } + + public void Error(string format, params Object[] args) + { + _executionContext.Error(string.Format(CultureInfo.CurrentCulture, format, args)); + } + + public void Info(string format, params Object[] args) + { + _executionContext.Debug(string.Format(CultureInfo.CurrentCulture, $"{format}", args)); + } + + public void Verbose(string format, params Object[] args) + { + // todo: switch to verbose? 
+ _executionContext.Debug(string.Format(CultureInfo.CurrentCulture, $"{format}", args)); + } + } + + public static class WellKnownTags + { + public static readonly string Section = "##[section]"; + public static readonly string Command = "##[command]"; + public static readonly string Error = "##[error]"; + public static readonly string Warning = "##[warning]"; + public static readonly string Debug = "##[debug]"; + } +} diff --git a/src/Runner.Worker/ExpressionManager.cs b/src/Runner.Worker/ExpressionManager.cs new file mode 100644 index 00000000000..b5218a806fd --- /dev/null +++ b/src/Runner.Worker/ExpressionManager.cs @@ -0,0 +1,162 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating; +using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants; + +namespace GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(ExpressionManager))] + public interface IExpressionManager : IRunnerService + { + ConditionResult Evaluate(IExecutionContext context, string condition, bool hostTracingOnly = false); + } + + public sealed class ExpressionManager : RunnerService, IExpressionManager + { + public ConditionResult Evaluate(IExecutionContext executionContext, string condition, bool hostTracingOnly = false) + { + ArgUtil.NotNull(executionContext, nameof(executionContext)); + + ConditionResult result = new ConditionResult(); + var expressionTrace = new TraceWriter(Trace, hostTracingOnly ? null : executionContext); + var tree = Parse(executionContext, expressionTrace, condition); + var expressionResult = tree.Evaluate(expressionTrace, HostContext.SecretMasker, state: executionContext, options: null); + result.Value = expressionResult.IsTruthy; + result.Trace = expressionTrace.Trace; + + return result; + } + + private static IExpressionNode Parse(IExecutionContext executionContext, TraceWriter expressionTrace, string condition) + { + ArgUtil.NotNull(executionContext, nameof(executionContext)); + + if (string.IsNullOrWhiteSpace(condition)) + { + condition = $"{PipelineTemplateConstants.Success}()"; + } + + var parser = new ExpressionParser(); + var namedValues = executionContext.ExpressionValues.Keys.Select(x => new NamedValueInfo(x)).ToArray(); + var functions = new IFunctionInfo[] + { + new FunctionInfo(name: Constants.Expressions.Always, minParameters: 0, maxParameters: 0), + new FunctionInfo(name: Constants.Expressions.Cancelled, minParameters: 0, maxParameters: 0), + new FunctionInfo(name: Constants.Expressions.Failure, minParameters: 0, maxParameters: 0), + new FunctionInfo(name: Constants.Expressions.Success, minParameters: 0, maxParameters: 0), + }; + return parser.CreateTree(condition, expressionTrace, namedValues, functions) ?? 
new SuccessNode(); + } + + private sealed class TraceWriter : DistributedTask.Expressions2.ITraceWriter + { + private readonly IExecutionContext _executionContext; + private readonly Tracing _trace; + private readonly StringBuilder _traceBuilder = new StringBuilder(); + + public string Trace => _traceBuilder.ToString(); + + public TraceWriter(Tracing trace, IExecutionContext executionContext) + { + ArgUtil.NotNull(trace, nameof(trace)); + _trace = trace; + _executionContext = executionContext; + } + + public void Info(string message) + { + _trace.Info(message); + _executionContext?.Debug(message); + _traceBuilder.AppendLine(message); + } + + public void Verbose(string message) + { + _trace.Verbose(message); + _executionContext?.Debug(message); + } + } + + private sealed class AlwaysNode : Function + { + protected override Object EvaluateCore(EvaluationContext context, out ResultMemory resultMemory) + { + resultMemory = null; + return true; + } + } + + private sealed class CancelledNode : Function + { + protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory) + { + resultMemory = null; + var executionContext = evaluationContext.State as IExecutionContext; + ArgUtil.NotNull(executionContext, nameof(executionContext)); + ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success; + return jobStatus == ActionResult.Cancelled; + } + } + + private sealed class FailureNode : Function + { + protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory) + { + resultMemory = null; + var executionContext = evaluationContext.State as IExecutionContext; + ArgUtil.NotNull(executionContext, nameof(executionContext)); + ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success; + return jobStatus == ActionResult.Failure; + } + } + + private sealed class SuccessNode : Function + { + protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory) + { + resultMemory = null; + var executionContext = evaluationContext.State as IExecutionContext; + ArgUtil.NotNull(executionContext, nameof(executionContext)); + ActionResult jobStatus = executionContext.JobContext.Status ?? 
ActionResult.Success; + return jobStatus == ActionResult.Success; + } + } + + private sealed class ContextValueNode : NamedValue + { + protected override Object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory) + { + resultMemory = null; + var jobContext = evaluationContext.State as IExecutionContext; + ArgUtil.NotNull(jobContext, nameof(jobContext)); + return jobContext.ExpressionValues[Name]; + } + } + } + + public class ConditionResult + { + public ConditionResult(bool value = false, string trace = null) + { + this.Value = value; + this.Trace = trace; + } + + public bool Value { get; set; } + public string Trace { get; set; } + + public static implicit operator ConditionResult(bool value) + { + return new ConditionResult(value); + } + } +} diff --git a/src/Runner.Worker/GitHubContext.cs b/src/Runner.Worker/GitHubContext.cs new file mode 100644 index 00000000000..77df9cfc6f6 --- /dev/null +++ b/src/Runner.Worker/GitHubContext.cs @@ -0,0 +1,35 @@ +using GitHub.DistributedTask.Pipelines.ContextData; +using System; +using System.Collections.Generic; + +namespace GitHub.Runner.Worker +{ + public sealed class GitHubContext : DictionaryContextData, IEnvironmentContextData + { + private readonly HashSet _contextEnvWhitelist = new HashSet(StringComparer.OrdinalIgnoreCase) + { + "action", + "actor", + "base_ref", + "event_name", + "event_path", + "head_ref", + "ref", + "repository", + "sha", + "workflow", + "workspace", + }; + + public IEnumerable> GetRuntimeEnvironmentVariables() + { + foreach (var data in this) + { + if (_contextEnvWhitelist.Contains(data.Key) && data.Value is StringContextData value) + { + yield return new KeyValuePair($"GITHUB_{data.Key.ToUpperInvariant()}", value); + } + } + } + } +} \ No newline at end of file diff --git a/src/Runner.Worker/Handlers/ContainerActionHandler.cs b/src/Runner.Worker/Handlers/ContainerActionHandler.cs new file mode 100644 index 00000000000..53be0ce27c1 --- /dev/null +++ b/src/Runner.Worker/Handlers/ContainerActionHandler.cs @@ -0,0 +1,203 @@ +using System.Collections.Generic; +using System.IO; +using System.Threading.Tasks; +using System; +using GitHub.Runner.Worker.Container; +using Pipelines = GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using GitHub.DistributedTask.WebApi; +using GitHub.DistributedTask.Pipelines.ContextData; +using System.Linq; + +namespace GitHub.Runner.Worker.Handlers +{ + [ServiceLocator(Default = typeof(ContainerActionHandler))] + public interface IContainerActionHandler : IHandler + { + ContainerActionExecutionData Data { get; set; } + } + + public sealed class ContainerActionHandler : Handler, IContainerActionHandler + { + public ContainerActionExecutionData Data { get; set; } + + public async Task RunAsync(ActionRunStage stage) + { + // Validate args. + Trace.Entering(); + ArgUtil.NotNull(Data, nameof(Data)); + ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext)); + +#if OS_WINDOWS || OS_OSX + throw new NotSupportedException($"Container action is only supported on Linux"); +#else + // Update the env dictionary. 
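The `GitHubContext` defined above feeds the `IEnvironmentContextData` loop that this handler runs a little further down: only the whitelisted keys are exported, each as a `GITHUB_*` variable. A small sketch of that mapping, using made-up repository and ref values and assuming the enumerated pairs carry plain strings:

```csharp
using System;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.Runner.Worker;

class GitHubContextDemo
{
    static void Main()
    {
        var github = new GitHubContext();
        github["repository"] = new StringContextData("octocat/hello-world"); // sample value
        github["ref"] = new StringContextData("refs/heads/main");            // sample value
        github["token"] = new StringContextData("***");                      // not whitelisted, never exported

        foreach (var pair in github.GetRuntimeEnvironmentVariables())
        {
            Console.WriteLine($"{pair.Key}={pair.Value}");
        }
        // Expected output (token is filtered out by the whitelist):
        // GITHUB_REPOSITORY=octocat/hello-world
        // GITHUB_REF=refs/heads/main
    }
}
```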
+ AddInputsToEnvironment(); + + var dockerManger = HostContext.GetService(); + + // container image haven't built/pull + if (Data.Image.StartsWith("docker://", StringComparison.OrdinalIgnoreCase)) + { + Data.Image = Data.Image.Substring("docker://".Length); + } + else if (Data.Image.EndsWith("Dockerfile") || Data.Image.EndsWith("dockerfile")) + { + // ensure docker file exist + var dockerFile = Path.Combine(ActionDirectory, Data.Image); + ArgUtil.File(dockerFile, nameof(Data.Image)); + ExecutionContext.Output($"Dockerfile for action: '{dockerFile}'."); + + var imageName = $"{dockerManger.DockerInstanceLabel}:{ExecutionContext.Id.ToString("N")}"; + var buildExitCode = await dockerManger.DockerBuild(ExecutionContext, ExecutionContext.GetGitHubContext("workspace"), Directory.GetParent(dockerFile).FullName, imageName); + if (buildExitCode != 0) + { + throw new InvalidOperationException($"Docker build failed with exit code {buildExitCode}"); + } + + Data.Image = imageName; + } + + // run container + var container = new ContainerInfo() + { + ContainerImage = Data.Image, + ContainerName = ExecutionContext.Id.ToString("N"), + ContainerDisplayName = $"{Pipelines.Validation.NameValidation.Sanitize(Data.Image)}_{Guid.NewGuid().ToString("N").Substring(0, 6)}", + }; + + if (stage == ActionRunStage.Main) + { + if (!string.IsNullOrEmpty(Data.EntryPoint)) + { + // use entrypoint from action.yml + container.ContainerEntryPoint = Data.EntryPoint; + } + else + { + // use entrypoint input, this is for action v1 which doesn't have action.yml + container.ContainerEntryPoint = Inputs.GetValueOrDefault("entryPoint"); + } + } + else if (stage == ActionRunStage.Post) + { + container.ContainerEntryPoint = Data.Cleanup; + } + + // create inputs context for template evaluation + var inputsContext = new DictionaryContextData(); + if (this.Inputs != null) + { + foreach (var input in Inputs) + { + inputsContext.Add(input.Key, new StringContextData(input.Value)); + } + } + + var evaluateContext = new Dictionary(StringComparer.OrdinalIgnoreCase); + evaluateContext["inputs"] = inputsContext; + + var manifestManager = HostContext.GetService(); + if (Data.Arguments != null) + { + container.ContainerEntryPointArgs = ""; + var evaluatedArgs = manifestManager.EvaluateContainerArguments(ExecutionContext, Data.Arguments, evaluateContext); + foreach (var arg in evaluatedArgs) + { + if (!string.IsNullOrEmpty(arg)) + { + container.ContainerEntryPointArgs = container.ContainerEntryPointArgs + $" \"{arg.Replace("\"", "\\\"")}\""; + } + else + { + container.ContainerEntryPointArgs = container.ContainerEntryPointArgs + " \"\""; + } + } + } + else + { + container.ContainerEntryPointArgs = Inputs.GetValueOrDefault("args"); + } + + if (Data.Environment != null) + { + var evaluatedEnv = manifestManager.EvaluateContainerEnvironment(ExecutionContext, Data.Environment, evaluateContext); + foreach (var env in evaluatedEnv) + { + if (!this.Environment.ContainsKey(env.Key)) + { + this.Environment[env.Key] = env.Value; + } + } + } + + if (ExecutionContext.JobContext.Container.TryGetValue("network", out var networkContextData) && networkContextData is StringContextData networkStringData) + { + container.ContainerNetwork = networkStringData.ToString(); + } + + var defaultWorkingDirectory = ExecutionContext.GetGitHubContext("workspace"); + var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp); + + ArgUtil.NotNullOrEmpty(defaultWorkingDirectory, nameof(defaultWorkingDirectory)); + ArgUtil.NotNullOrEmpty(tempDirectory, 
nameof(tempDirectory)); + + var tempHomeDirectory = Path.Combine(tempDirectory, "_github_home"); + Directory.CreateDirectory(tempHomeDirectory); + this.Environment["HOME"] = tempHomeDirectory; + + var tempWorkflowDirectory = Path.Combine(tempDirectory, "_github_workflow"); + ArgUtil.Directory(tempWorkflowDirectory, nameof(tempWorkflowDirectory)); + + container.MountVolumes.Add(new MountVolume("/var/run/docker.sock", "/var/run/docker.sock")); + container.MountVolumes.Add(new MountVolume(tempHomeDirectory, "/github/home")); + container.MountVolumes.Add(new MountVolume(tempWorkflowDirectory, "/github/workflow")); + container.MountVolumes.Add(new MountVolume(defaultWorkingDirectory, "/github/workspace")); + + container.AddPathTranslateMapping(tempHomeDirectory, "/github/home"); + container.AddPathTranslateMapping(tempWorkflowDirectory, "/github/workflow"); + container.AddPathTranslateMapping(defaultWorkingDirectory, "/github/workspace"); + + container.ContainerWorkDirectory = "/github/workspace"; + + // expose context to environment + foreach (var context in ExecutionContext.ExpressionValues) + { + if (context.Value is IEnvironmentContextData runtimeContext && runtimeContext != null) + { + foreach (var env in runtimeContext.GetRuntimeEnvironmentVariables()) + { + Environment[env.Key] = env.Value; + } + } + } + + // Add Actions Runtime server info + var systemConnection = ExecutionContext.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); + Environment["ACTIONS_RUNTIME_URL"] = systemConnection.Url.AbsoluteUri; + Environment["ACTIONS_RUNTIME_TOKEN"] = systemConnection.Authorization.Parameters[EndpointAuthorizationParameters.AccessToken]; + if (systemConnection.Data.TryGetValue("CacheServerUrl", out var cacheUrl) && !string.IsNullOrEmpty(cacheUrl)) + { + Environment["ACTIONS_CACHE_URL"] = cacheUrl; + } + + foreach (var variable in this.Environment) + { + container.ContainerEnvironmentVariables[variable.Key] = container.TranslateToContainerPath(variable.Value); + } + + using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager)) + using (var stderrManager = new OutputManager(ExecutionContext, ActionCommandManager)) + { + var runExitCode = await dockerManger.DockerRun(ExecutionContext, container, stdoutManager.OnDataReceived, stderrManager.OnDataReceived); + if (runExitCode != 0) + { + ExecutionContext.Error($"Docker run failed with exit code {runExitCode}"); + ExecutionContext.Result = TaskResult.Failed; + } + } +#endif + } + } +} diff --git a/src/Runner.Worker/Handlers/Handler.cs b/src/Runner.Worker/Handlers/Handler.cs new file mode 100644 index 00000000000..11e30e62a3a --- /dev/null +++ b/src/Runner.Worker/Handlers/Handler.cs @@ -0,0 +1,177 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using System.Linq; +using System.IO; +using Pipelines = GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker.Handlers +{ + public interface IHandler : IRunnerService + { + Pipelines.ActionStepDefinitionReference Action { get; set; } + Dictionary Environment { get; set; } + IExecutionContext ExecutionContext { get; set; } + Variables RuntimeVariables { get; set; } + IStepHost StepHost { get; set; } + Dictionary Inputs { get; set; } + string ActionDirectory { get; set; } + Task RunAsync(ActionRunStage stage); + void 
PrintActionDetails(ActionRunStage stage); + } + + public abstract class Handler : RunnerService + { +#if OS_WINDOWS + // In windows OS the maximum supported size of a environment variable value is 32k. + // You can set environment variable greater then 32K, but that variable will not be able to read in node.exe. + private const int _environmentVariableMaximumSize = 32766; +#endif + + protected IActionCommandManager ActionCommandManager { get; private set; } + + public Pipelines.ActionStepDefinitionReference Action { get; set; } + public Dictionary Environment { get; set; } + public Variables RuntimeVariables { get; set; } + public IExecutionContext ExecutionContext { get; set; } + public IStepHost StepHost { get; set; } + public Dictionary Inputs { get; set; } + public string ActionDirectory { get; set; } + + public virtual void PrintActionDetails(ActionRunStage stage) + { + if (stage == ActionRunStage.Post) + { + ExecutionContext.Output($"Post job cleanup."); + return; + } + + string groupName = ""; + if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry) + { + var registryAction = Action as Pipelines.ContainerRegistryReference; + groupName = $"Run docker://{registryAction.Image}"; + } + else if (Action.Type == Pipelines.ActionSourceType.Repository) + { + var repoAction = Action as Pipelines.RepositoryPathReference; + if (string.Equals(repoAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase)) + { + groupName = $"Run {repoAction.Path}"; + } + else + { + if (string.IsNullOrEmpty(repoAction.Path)) + { + groupName = $"Run {repoAction.Name}@{repoAction.Ref}"; + } + else + { + groupName = $"Run {repoAction.Name}/{repoAction.Path}@{repoAction.Ref}"; + } + } + } + else + { + // this should never happen + Trace.Error($"Can't generate default folding group name for action {Action.Type.ToString()}"); + groupName = "Action details"; + } + + ExecutionContext.Output($"##[group]{groupName}"); + + if (this.Inputs?.Count > 0) + { + ExecutionContext.Output("with:"); + foreach (var input in this.Inputs) + { + if (!string.IsNullOrEmpty(input.Value)) + { + ExecutionContext.Output($" {input.Key}: {input.Value}"); + } + } + } + + if (this.Environment?.Count > 0) + { + ExecutionContext.Output("env:"); + foreach (var env in this.Environment) + { + ExecutionContext.Output($" {env.Key}: {env.Value}"); + } + } + + ExecutionContext.Output("##[endgroup]"); + } + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + ActionCommandManager = hostContext.CreateService(); + } + + protected void AddInputsToEnvironment() + { + // Validate args. + Trace.Entering(); + ArgUtil.NotNull(Inputs, nameof(Inputs)); + + // Add the inputs to the environment variable dictionary. + foreach (KeyValuePair pair in Inputs) + { + AddEnvironmentVariable( + key: $"INPUT_{pair.Key?.Replace(' ', '_').ToUpperInvariant()}", + value: pair.Value); + } + } + + protected void AddEnvironmentVariable(string key, string value) + { + ArgUtil.NotNullOrEmpty(key, nameof(key)); + Trace.Verbose($"Setting env '{key}' to '{value}'."); + + Environment[key] = value ?? string.Empty; + +#if OS_WINDOWS + if (Environment[key].Length > _environmentVariableMaximumSize) + { + ExecutionContext.Warning($"Environment variable '{key}' exceeds the maximum supported length. Environment variable length: {value.Length} , Maximum supported length: {_environmentVariableMaximumSize}"); + } +#endif + } + + protected void AddPrependPathToEnvironment() + { + // Validate args. 
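`AddInputsToEnvironment` above is how action inputs reach the action process: each input becomes an `INPUT_*` environment variable, with spaces replaced by underscores and the key upper-cased. A small sketch of the transformation with made-up inputs:

```csharp
using System;
using System.Collections.Generic;

class InputEnvDemo
{
    static void Main()
    {
        // Made-up action inputs.
        var inputs = new Dictionary<string, string>
        {
            ["who to greet"] = "Mona",
            ["entryPoint"] = "/entry.sh",
        };

        foreach (KeyValuePair<string, string> pair in inputs)
        {
            // Mirrors AddInputsToEnvironment: spaces -> underscores, key upper-cased.
            string key = $"INPUT_{pair.Key?.Replace(' ', '_').ToUpperInvariant()}";
            Console.WriteLine($"{key}={pair.Value ?? string.Empty}");
        }
        // Prints:
        // INPUT_WHO_TO_GREET=Mona
        // INPUT_ENTRYPOINT=/entry.sh
    }
}
```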
+ Trace.Entering(); + ArgUtil.NotNull(ExecutionContext.PrependPath, nameof(ExecutionContext.PrependPath)); + if (ExecutionContext.PrependPath.Count == 0) + { + return; + } + + // Prepend path. + string prepend = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse()); + var containerStepHost = StepHost as ContainerStepHost; + if (containerStepHost != null) + { + containerStepHost.PrependPath = prepend; + } + else + { + string taskEnvPATH; + Environment.TryGetValue(Constants.PathVariable, out taskEnvPATH); + string originalPath = RuntimeVariables.Get(Constants.PathVariable) ?? // Prefer a job variable. + taskEnvPATH ?? // Then a task-environment variable. + System.Environment.GetEnvironmentVariable(Constants.PathVariable) ?? // Then an environment variable. + string.Empty; + string newPath = PathUtil.PrependPath(prepend, originalPath); + AddEnvironmentVariable(Constants.PathVariable, newPath); + } + } + } +} diff --git a/src/Runner.Worker/Handlers/HandlerFactory.cs b/src/Runner.Worker/Handlers/HandlerFactory.cs new file mode 100644 index 00000000000..2aac69d5a02 --- /dev/null +++ b/src/Runner.Worker/Handlers/HandlerFactory.cs @@ -0,0 +1,85 @@ +using System; +using System.Collections.Generic; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using Pipelines = GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker.Handlers +{ + [ServiceLocator(Default = typeof(HandlerFactory))] + public interface IHandlerFactory : IRunnerService + { + IHandler Create( + IExecutionContext executionContext, + Pipelines.ActionStepDefinitionReference action, + IStepHost stepHost, + ActionExecutionData data, + Dictionary inputs, + Dictionary environment, + Variables runtimeVariables, + string actionDirectory); + } + + public sealed class HandlerFactory : RunnerService, IHandlerFactory + { + public IHandler Create( + IExecutionContext executionContext, + Pipelines.ActionStepDefinitionReference action, + IStepHost stepHost, + ActionExecutionData data, + Dictionary inputs, + Dictionary environment, + Variables runtimeVariables, + string actionDirectory) + { + // Validate args. + Trace.Entering(); + ArgUtil.NotNull(executionContext, nameof(executionContext)); + ArgUtil.NotNull(stepHost, nameof(stepHost)); + ArgUtil.NotNull(data, nameof(data)); + ArgUtil.NotNull(inputs, nameof(inputs)); + ArgUtil.NotNull(environment, nameof(environment)); + ArgUtil.NotNull(runtimeVariables, nameof(runtimeVariables)); + + // Create the handler. + IHandler handler; + if (data.ExecutionType == ActionExecutionType.Container) + { + handler = HostContext.CreateService(); + (handler as IContainerActionHandler).Data = data as ContainerActionExecutionData; + } + else if (data.ExecutionType == ActionExecutionType.NodeJS) + { + handler = HostContext.CreateService(); + (handler as INodeScriptActionHandler).Data = data as NodeJSActionExecutionData; + } + else if (data.ExecutionType == ActionExecutionType.Script) + { + handler = HostContext.CreateService(); + (handler as IScriptHandler).Data = data as ScriptActionExecutionData; + } + else if (data.ExecutionType == ActionExecutionType.Plugin) + { + // Agent plugin + handler = HostContext.CreateService(); + (handler as IRunnerPluginHandler).Data = data as PluginActionExecutionData; + } + else + { + // This should never happen. 
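Back in `AddPrependPathToEnvironment` above, later additions to `PrependPath` win because the list is reversed before joining, and the result is stitched onto the existing `PATH`. A standalone sketch of that composition; the directories are made up, and `PathUtil.PrependPath` is assumed to behave like the simple join shown:

```csharp
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

class PrependPathDemo
{
    static void Main()
    {
        // Made-up directories queued by earlier steps; later entries should win.
        var prependPath = new List<string>
        {
            "/opt/hostedtoolcache/node/12.13.0/x64/bin",
            "/home/runner/custom/bin",
        };

        // AsEnumerable() forces the LINQ Reverse() rather than the in-place List<T>.Reverse().
        string prepend = string.Join(Path.PathSeparator.ToString(), prependPath.AsEnumerable().Reverse());
        string originalPath = Environment.GetEnvironmentVariable("PATH") ?? string.Empty;

        // Assumed equivalent of PathUtil.PrependPath(prepend, originalPath).
        string newPath = string.IsNullOrEmpty(originalPath)
            ? prepend
            : prepend + Path.PathSeparator + originalPath;

        Console.WriteLine(newPath);
        // e.g. /home/runner/custom/bin:/opt/hostedtoolcache/node/12.13.0/x64/bin:/usr/bin:...
    }
}
```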
+ throw new NotSupportedException(data.ExecutionType.ToString()); + } + + handler.Action = action; + handler.Environment = environment; + handler.RuntimeVariables = runtimeVariables; + handler.ExecutionContext = executionContext; + handler.StepHost = stepHost; + handler.Inputs = inputs; + handler.ActionDirectory = actionDirectory; + return handler; + } + } +} diff --git a/src/Runner.Worker/Handlers/NodeScriptActionHandler.cs b/src/Runner.Worker/Handlers/NodeScriptActionHandler.cs new file mode 100644 index 00000000000..911265e3c75 --- /dev/null +++ b/src/Runner.Worker/Handlers/NodeScriptActionHandler.cs @@ -0,0 +1,134 @@ +using System.IO; +using System.Text; +using System.Threading.Tasks; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using GitHub.DistributedTask.WebApi; +using Pipelines = GitHub.DistributedTask.Pipelines; +using System; +using System.Linq; + +namespace GitHub.Runner.Worker.Handlers +{ + [ServiceLocator(Default = typeof(NodeScriptActionHandler))] + public interface INodeScriptActionHandler : IHandler + { + NodeJSActionExecutionData Data { get; set; } + } + + public sealed class NodeScriptActionHandler : Handler, INodeScriptActionHandler + { + public NodeJSActionExecutionData Data { get; set; } + + public async Task RunAsync(ActionRunStage stage) + { + // Validate args. + Trace.Entering(); + ArgUtil.NotNull(Data, nameof(Data)); + ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext)); + ArgUtil.NotNull(Inputs, nameof(Inputs)); + ArgUtil.Directory(ActionDirectory, nameof(ActionDirectory)); + + // Update the env dictionary. + AddInputsToEnvironment(); + AddPrependPathToEnvironment(); + + // expose context to environment + foreach (var context in ExecutionContext.ExpressionValues) + { + if (context.Value is IEnvironmentContextData runtimeContext && runtimeContext != null) + { + foreach (var env in runtimeContext.GetRuntimeEnvironmentVariables()) + { + Environment[env.Key] = env.Value; + } + } + } + + // Add Actions Runtime server info + var systemConnection = ExecutionContext.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); + Environment["ACTIONS_RUNTIME_URL"] = systemConnection.Url.AbsoluteUri; + Environment["ACTIONS_RUNTIME_TOKEN"] = systemConnection.Authorization.Parameters[EndpointAuthorizationParameters.AccessToken]; + if (systemConnection.Data.TryGetValue("CacheServerUrl", out var cacheUrl) && !string.IsNullOrEmpty(cacheUrl)) + { + Environment["ACTIONS_CACHE_URL"] = cacheUrl; + } + + // Resolve the target script. + string target = null; + if (stage == ActionRunStage.Main) + { + target = Data.Script; + } + else if (stage == ActionRunStage.Post) + { + target = Data.Cleanup; + } + + ArgUtil.NotNullOrEmpty(target, nameof(target)); + target = Path.Combine(ActionDirectory, target); + ArgUtil.File(target, nameof(target)); + + // Resolve the working directory. + string workingDirectory = ExecutionContext.GetGitHubContext("workspace"); + if (string.IsNullOrEmpty(workingDirectory)) + { + workingDirectory = HostContext.GetDirectory(WellKnownDirectory.Work); + } + + var nodeRuntimeVersion = await StepHost.DetermineNodeRuntimeVersion(ExecutionContext); + string file = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), nodeRuntimeVersion, "bin", $"node{IOUtil.ExeExtension}"); + + // Format the arguments passed to node. + // 1) Wrap the script file path in double quotes. + // 2) Escape double quotes within the script file path. 
Double-quote is a valid + // file name character on Linux. + string arguments = StepHost.ResolvePathForStepHost(StringUtil.Format(@"""{0}""", target.Replace(@"""", @"\"""))); + +#if OS_WINDOWS + // It appears that node.exe outputs UTF8 when not in TTY mode. + Encoding outputEncoding = Encoding.UTF8; +#else + // Let .NET choose the default. + Encoding outputEncoding = null; +#endif + + using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager)) + using (var stderrManager = new OutputManager(ExecutionContext, ActionCommandManager)) + { + StepHost.OutputDataReceived += stdoutManager.OnDataReceived; + StepHost.ErrorDataReceived += stderrManager.OnDataReceived; + + // Execute the process. Exit code 0 should always be returned. + // A non-zero exit code indicates infrastructural failure. + // Task failure should be communicated over STDOUT using ## commands. + Task step = StepHost.ExecuteAsync(workingDirectory: StepHost.ResolvePathForStepHost(workingDirectory), + fileName: StepHost.ResolvePathForStepHost(file), + arguments: arguments, + environment: Environment, + requireExitCodeZero: false, + outputEncoding: outputEncoding, + killProcessOnCancel: false, + inheritConsoleHandler: !ExecutionContext.Variables.Retain_Default_Encoding, + cancellationToken: ExecutionContext.CancellationToken); + + // Wait for either the node exit or force finish through ##vso command + await System.Threading.Tasks.Task.WhenAny(step, ExecutionContext.ForceCompleted); + + if (ExecutionContext.ForceCompleted.IsCompleted) + { + ExecutionContext.Debug("The task was marked as \"done\", but the process has not closed after 5 seconds. Treating the task as complete."); + } + else + { + var exitCode = await step; + if (exitCode != 0) + { + ExecutionContext.Error($"Node run failed with exit code {exitCode}"); + ExecutionContext.Result = TaskResult.Failed; + } + } + } + } + } +} diff --git a/src/Runner.Worker/Handlers/OutputManager.cs b/src/Runner.Worker/Handlers/OutputManager.cs new file mode 100644 index 00000000000..0a06bf9566c --- /dev/null +++ b/src/Runner.Worker/Handlers/OutputManager.cs @@ -0,0 +1,319 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Text.RegularExpressions; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using DTWebApi = GitHub.DistributedTask.WebApi; + +namespace GitHub.Runner.Worker.Handlers +{ + public sealed class OutputManager : IDisposable + { + private const string _colorCodePrefix = "\033["; + private const int _maxAttempts = 3; + private const string _timeoutKey = "GITHUB_ACTIONS_RUNNER_ISSUE_MATCHER_TIMEOUT"; + private static readonly Regex _colorCodeRegex = new Regex(@"\x0033\[[0-9;]*m?", RegexOptions.Compiled | RegexOptions.CultureInvariant); + private readonly IActionCommandManager _commandManager; + private readonly IExecutionContext _executionContext; + private readonly object _matchersLock = new object(); + private readonly TimeSpan _timeout; + private IssueMatcher[] _matchers = Array.Empty(); + + public OutputManager(IExecutionContext executionContext, IActionCommandManager commandManager) + { + //executionContext.Debug("ENTERING OutputManager ctor"); + _executionContext = executionContext; + _commandManager = commandManager; + + //_executionContext.Debug("OutputManager ctor - determine timeout from variable"); + // Determine the timeout + var timeoutStr = _executionContext.Variables.Get(_timeoutKey); + if (string.IsNullOrEmpty(timeoutStr) || + 
!TimeSpan.TryParse(timeoutStr, CultureInfo.InvariantCulture, out _timeout) || + _timeout <= TimeSpan.Zero) + { + //_executionContext.Debug("OutputManager ctor - determine timeout from env var"); + timeoutStr = Environment.GetEnvironmentVariable(_timeoutKey); + if (string.IsNullOrEmpty(timeoutStr) || + !TimeSpan.TryParse(timeoutStr, CultureInfo.InvariantCulture, out _timeout) || + _timeout <= TimeSpan.Zero) + { + //_executionContext.Debug("OutputManager ctor - set timeout to default"); + _timeout = TimeSpan.FromSeconds(1); + } + } + + //_executionContext.Debug("OutputManager ctor - adding matchers"); + // Lock + lock (_matchersLock) + { + //_executionContext.Debug("OutputManager ctor - adding OnMatcherChanged"); + _executionContext.Add(OnMatcherChanged); + //_executionContext.Debug("OutputManager ctor - getting matchers"); + _matchers = _executionContext.GetMatchers().Select(x => new IssueMatcher(x, _timeout)).ToArray(); + } + //_executionContext.Debug("LEAVING OutputManager ctor"); + } + + public void Dispose() + { + try + { + _executionContext.Remove(OnMatcherChanged); + } + catch + { + } + } + + public void OnDataReceived(object sender, ProcessDataReceivedEventArgs e) + { + //_executionContext.Debug("ENTERING OutputManager OnDataReceived"); + var line = e.Data; + + // ## commands + if (!String.IsNullOrEmpty(line) && + (line.IndexOf(ActionCommand.Prefix) >= 0 || line.IndexOf(ActionCommand._commandKey) >= 0)) + { + // This does not need to be inside of a critical section. + // The logging queues and command handlers are thread-safe. + if (_commandManager.TryProcessCommand(_executionContext, line)) + { + //_executionContext.Debug("LEAVING OutputManager OnDataReceived - command processed"); + return; + } + } + + // Problem matchers + if (_matchers.Length > 0) + { + // Copy the reference + var matchers = _matchers; + + // Strip color codes + var stripped = line.Contains(_colorCodePrefix) ? _colorCodeRegex.Replace(line, string.Empty) : line; + + foreach (var matcher in matchers) + { + IssueMatch match = null; + for (var attempt = 1; attempt <= _maxAttempts; attempt++) + { + // Match + try + { + match = matcher.Match(stripped); + + break; + } + catch (RegexMatchTimeoutException ex) + { + if (attempt < _maxAttempts) + { + // Debug + _executionContext.Debug($"Timeout processing issue matcher '{matcher.Owner}' against line '{stripped}'. Exception: {ex.ToString()}"); + } + else + { + // Warn + _executionContext.Warning($"Removing issue matcher '{matcher.Owner}'. Matcher failed {_maxAttempts} times. 
Error: {ex.Message}"); + + // Remove + Remove(matcher); + } + } + } + + if (match != null) + { + // Reset other matchers + foreach (var otherMatcher in matchers.Where(x => !object.ReferenceEquals(x, matcher))) + { + otherMatcher.Reset(); + } + + // Convert to issue + var issue = ConvertToIssue(match); + + if (issue != null) + { + // Log issue + _executionContext.AddIssue(issue, stripped); + + //_executionContext.Debug("LEAVING OutputManager OnDataReceived - issue logged"); + return; + } + } + } + } + + // Regular output + _executionContext.Output(line); + //_executionContext.Debug("LEAVING OutputManager OnDataReceived"); + } + + private void OnMatcherChanged(object sender, MatcherChangedEventArgs e) + { + // Lock + lock (_matchersLock) + { + var newMatchers = new List(); + + // Prepend + if (e.Config.Patterns.Length > 0) + { + newMatchers.Add(new IssueMatcher(e.Config, _timeout)); + } + + // Add existing non-matching + newMatchers.AddRange(_matchers.Where(x => !string.Equals(x.Owner, e.Config.Owner, StringComparison.OrdinalIgnoreCase))); + + // Store + _matchers = newMatchers.ToArray(); + } + } + + private void Remove(IssueMatcher matcher) + { + // Lock + lock (_matchersLock) + { + var newMatchers = new List(); + + // Match by object reference, not by owner name + newMatchers.AddRange(_matchers.Where(x => !object.ReferenceEquals(x, matcher))); + + // Store + _matchers = newMatchers.ToArray(); + } + } + + private DTWebApi.Issue ConvertToIssue(IssueMatch match) + { + // Validate the message + if (string.IsNullOrWhiteSpace(match.Message)) + { + _executionContext.Debug("Skipping logging an issue for the matched line because the message is empty."); + return null; + } + + // Validate the severity + DTWebApi.IssueType issueType; + if (string.IsNullOrEmpty(match.Severity) || string.Equals(match.Severity, "error", StringComparison.OrdinalIgnoreCase)) + { + issueType = DTWebApi.IssueType.Error; + } + else if (string.Equals(match.Severity, "warning", StringComparison.OrdinalIgnoreCase)) + { + issueType = DTWebApi.IssueType.Warning; + } + else + { + _executionContext.Debug($"Skipped logging an issue for the matched line because the severity '{match.Severity}' is not supported."); + return null; + } + + var issue = new DTWebApi.Issue + { + Message = match.Message, + Type = issueType, + }; + + // Line + if (!string.IsNullOrEmpty(match.Line)) + { + if (int.TryParse(match.Line, NumberStyles.None, CultureInfo.InvariantCulture, out var line)) + { + issue.Data["line"] = line.ToString(CultureInfo.InvariantCulture); + } + else + { + _executionContext.Debug($"Unable to parse line number '{match.Line}'"); + } + } + + // Column + if (!string.IsNullOrEmpty(match.Column)) + { + if (int.TryParse(match.Column, NumberStyles.None, CultureInfo.InvariantCulture, out var column)) + { + issue.Data["col"] = column.ToString(CultureInfo.InvariantCulture); + } + else + { + _executionContext.Debug($"Unable to parse column number '{match.Column}'"); + } + } + + // Code + if (!string.IsNullOrWhiteSpace(match.Code)) + { + issue.Data["code"] = match.Code.Trim(); + } + + // File + try + { + if (!string.IsNullOrWhiteSpace(match.File)) + { + var file = match.File; + + // Root using fromPath + if (!string.IsNullOrWhiteSpace(match.FromPath) && !Path.IsPathRooted(file)) + { + file = Path.Combine(match.FromPath, file); + } + + // Root using system.defaultWorkingDirectory + if (!Path.IsPathRooted(file)) + { + var githubContext = _executionContext.ExpressionValues["github"] as GitHubContext; + ArgUtil.NotNull(githubContext, 
nameof(githubContext)); + var workspace = githubContext["workspace"].ToString(); + ArgUtil.NotNullOrEmpty(workspace, "workspace"); + + file = Path.Combine(workspace, file); + } + + // Normalize slashes + file = file.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar); + + // File exists + if (File.Exists(file)) + { + // Repository path + var repositoryPath = _executionContext.GetGitHubContext("workspace"); + ArgUtil.NotNullOrEmpty(repositoryPath, nameof(repositoryPath)); + + // Normalize slashes + repositoryPath = repositoryPath.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar).TrimEnd(Path.DirectorySeparatorChar) + Path.DirectorySeparatorChar; + + if (!file.StartsWith(repositoryPath, IOUtil.FilePathStringComparison)) + { + // File is not under repo + _executionContext.Debug($"Dropping file value '{file}'. Path is not under the repo."); + } + else + { + // prefer `/` on all platforms + issue.Data["file"] = file.Substring(repositoryPath.Length).TrimStart(Path.DirectorySeparatorChar).Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + } + } + // File does not exist + else + { + _executionContext.Debug($"Dropping file value '{file}'. Path does not exist"); + } + } + } + catch (Exception ex) + { + _executionContext.Debug($"Dropping file value '{match.File}' and fromPath value '{match.FromPath}'. Exception during validation: {ex.ToString()}"); + } + + return issue; + } + } +} diff --git a/src/Runner.Worker/Handlers/RunnerPluginHandler.cs b/src/Runner.Worker/Handlers/RunnerPluginHandler.cs new file mode 100644 index 00000000000..c082fe9fcf3 --- /dev/null +++ b/src/Runner.Worker/Handlers/RunnerPluginHandler.cs @@ -0,0 +1,58 @@ +using System.Threading.Tasks; +using System; +using GitHub.Runner.Sdk; +using GitHub.Runner.Common; +using Pipelines = GitHub.DistributedTask.Pipelines; + +namespace GitHub.Runner.Worker.Handlers +{ + [ServiceLocator(Default = typeof(RunnerPluginHandler))] + public interface IRunnerPluginHandler : IHandler + { + PluginActionExecutionData Data { get; set; } + } + + public sealed class RunnerPluginHandler : Handler, IRunnerPluginHandler + { + public PluginActionExecutionData Data { get; set; } + + public async Task RunAsync(ActionRunStage stage) + { + // Validate args. + Trace.Entering(); + ArgUtil.NotNull(Data, nameof(Data)); + ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext)); + ArgUtil.NotNull(Inputs, nameof(Inputs)); + + string plugin = null; + if (stage == ActionRunStage.Main) + { + plugin = Data.Plugin; + } + else if (stage == ActionRunStage.Post) + { + plugin = Data.Cleanup; + } + + ArgUtil.NotNullOrEmpty(plugin, nameof(plugin)); + + // Update the env dictionary. + AddPrependPathToEnvironment(); + + // Make sure only particular task get run as runner plugin. 
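+            // Plugin-internal commands are enabled just before RunPluginActionAsync is invoked and are
+            // disabled again in the finally block below, so they are honored only while the plugin runs.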
+ var runnerPlugin = HostContext.GetService(); + using (var outputManager = new OutputManager(ExecutionContext, ActionCommandManager)) + { + ActionCommandManager.EnablePluginInternalCommand(); + try + { + await runnerPlugin.RunPluginActionAsync(ExecutionContext, plugin, Inputs, Environment, RuntimeVariables, outputManager.OnDataReceived); + } + finally + { + ActionCommandManager.DisablePluginInternalCommand(); + } + } + } + } +} diff --git a/src/Runner.Worker/Handlers/ScriptHandler.cs b/src/Runner.Worker/Handlers/ScriptHandler.cs new file mode 100644 index 00000000000..ab6dda544e1 --- /dev/null +++ b/src/Runner.Worker/Handlers/ScriptHandler.cs @@ -0,0 +1,241 @@ +using System; +using System.IO; +using System.Text; +using System.Threading.Tasks; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using GitHub.DistributedTask.WebApi; +using Pipelines = GitHub.DistributedTask.Pipelines; + +namespace GitHub.Runner.Worker.Handlers +{ + [ServiceLocator(Default = typeof(ScriptHandler))] + public interface IScriptHandler : IHandler + { + ScriptActionExecutionData Data { get; set; } + } + + public sealed class ScriptHandler : Handler, IScriptHandler + { + public ScriptActionExecutionData Data { get; set; } + + public override void PrintActionDetails(ActionRunStage stage) + { + if (stage == ActionRunStage.Post) + { + throw new NotSupportedException("Script action should not have 'Post' job action."); + } + + Inputs.TryGetValue("script", out string contents); + contents = contents ?? string.Empty; + if (Action.Type == Pipelines.ActionSourceType.Script) + { + var firstLine = contents.TrimStart(' ', '\t', '\r', '\n'); + var firstNewLine = firstLine.IndexOfAny(new[] { '\r', '\n' }); + if (firstNewLine >= 0) + { + firstLine = firstLine.Substring(0, firstNewLine); + } + + ExecutionContext.Output($"##[group]Run {firstLine}"); + } + else + { + throw new InvalidOperationException($"Invalid action type {Action.Type} for {nameof(ScriptHandler)}"); + } + + var multiLines = contents.Replace("\r\n", "\n").TrimEnd('\n').Split('\n'); + foreach (var line in multiLines) + { + // Bright Cyan color + ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m"); + } + + string argFormat; + string shellCommand; + string shellCommandPath = null; + bool validateShellOnHost = !(StepHost is ContainerStepHost); + Inputs.TryGetValue("shell", out var shell); + if (string.IsNullOrEmpty(shell)) + { +#if OS_WINDOWS + shellCommand = "cmd"; + if(validateShellOnHost) + { + shellCommandPath = System.Environment.GetEnvironmentVariable("ComSpec"); + } +#else + shellCommand = "sh"; + if (validateShellOnHost) + { + shellCommandPath = WhichUtil.Which("bash") ?? 
WhichUtil.Which("sh", true, Trace); + } +#endif + argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand); + } + else + { + var parsed = ScriptHandlerHelpers.ParseShellOptionString(shell); + shellCommand = parsed.shellCommand; + if (validateShellOnHost) + { + shellCommandPath = WhichUtil.Which(parsed.shellCommand, true, Trace); + } + + argFormat = $"{parsed.shellArgs}".TrimStart(); + if (string.IsNullOrEmpty(argFormat)) + { + argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand); + } + } + + if (!string.IsNullOrEmpty(shellCommandPath)) + { + ExecutionContext.Output($"shell: {shellCommandPath} {argFormat}"); + } + else + { + ExecutionContext.Output($"shell: {shellCommand} {argFormat}"); + } + + if (this.Environment?.Count > 0) + { + ExecutionContext.Output("env:"); + foreach (var env in this.Environment) + { + ExecutionContext.Output($" {env.Key}: {env.Value}"); + } + } + + ExecutionContext.Output("##[endgroup]"); + } + + public async Task RunAsync(ActionRunStage stage) + { + if (stage == ActionRunStage.Post) + { + throw new NotSupportedException("Script action should not have 'Post' job action."); + } + + // Validate args + Trace.Entering(); + ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext)); + ArgUtil.NotNull(Inputs, nameof(Inputs)); + + var githubContext = ExecutionContext.ExpressionValues["github"] as GitHubContext; + ArgUtil.NotNull(githubContext, nameof(githubContext)); + + var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp); + + Inputs.TryGetValue("script", out var contents); + contents = contents ?? string.Empty; + + Inputs.TryGetValue("workingDirectory", out var workingDirectory); + var workspaceDir = githubContext["workspace"] as StringContextData; + workingDirectory = Path.Combine(workspaceDir, workingDirectory ?? string.Empty); + + Inputs.TryGetValue("shell", out var shell); + var isContainerStepHost = StepHost is ContainerStepHost; + + string commandPath, argFormat, shellCommand; + // Set up default command and arguments + if (string.IsNullOrEmpty(shell)) + { +#if OS_WINDOWS + shellCommand = "cmd"; + commandPath = System.Environment.GetEnvironmentVariable("ComSpec"); + ArgUtil.NotNullOrEmpty(commandPath, "%ComSpec%"); +#else + shellCommand = "sh"; + commandPath = WhichUtil.Which("bash", false, Trace) ?? WhichUtil.Which("sh", true, Trace); +#endif + argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand); + } + else + { + var parsed = ScriptHandlerHelpers.ParseShellOptionString(shell); + shellCommand = parsed.shellCommand; + // For non-ContainerStepHost, the command must be located on the host by Which + commandPath = WhichUtil.Which(parsed.shellCommand, !isContainerStepHost, Trace); + argFormat = $"{parsed.shellArgs}".TrimStart(); + if (string.IsNullOrEmpty(argFormat)) + { + argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand); + } + } + + // No arg format was given, shell must be a built-in + if (string.IsNullOrEmpty(argFormat) || !argFormat.Contains("{0}")) + { + throw new ArgumentException("Invalid shell option. 
Shell must be a valid built-in (bash, sh, cmd, powershell, pwsh) or a format string containing '{0}'"); + } + + // We do not not the full path until we know what shell is being used, so that we can determine the file extension + var scriptFilePath = Path.Combine(tempDirectory, $"{Guid.NewGuid()}{ScriptHandlerHelpers.GetScriptFileExtension(shellCommand)}"); + var resolvedScriptPath = $"{StepHost.ResolvePathForStepHost(scriptFilePath).Replace("\"", "\\\"")}"; + + // Format arg string with script path + var arguments = string.Format(argFormat, resolvedScriptPath); + + // Fix up and write the script + contents = ScriptHandlerHelpers.FixUpScriptContents(shellCommand, contents); +#if OS_WINDOWS + // Normalize Windows line endings + contents = contents.Replace("\r\n", "\n").Replace("\n", "\r\n"); + var encoding = ExecutionContext.Variables.Retain_Default_Encoding && Console.InputEncoding.CodePage != 65001 + ? Console.InputEncoding + : new UTF8Encoding(false); +#else + // Don't add a BOM. It causes the script to fail on some operating systems (e.g. on Ubuntu 14). + var encoding = new UTF8Encoding(false); +#endif + // Script is written to local path (ie host) but executed relative to the StepHost, which may be a container + File.WriteAllText(scriptFilePath, contents, encoding); + + // Prepend PATH + AddPrependPathToEnvironment(); + + // expose context to environment + foreach (var context in ExecutionContext.ExpressionValues) + { + if (context.Value is IEnvironmentContextData runtimeContext && runtimeContext != null) + { + foreach (var env in runtimeContext.GetRuntimeEnvironmentVariables()) + { + Environment[env.Key] = env.Value; + } + } + } + + // dump out the command + var fileName = isContainerStepHost ? shellCommand : commandPath; + ExecutionContext.Debug($"{fileName} {arguments}"); + + using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager)) + using (var stderrManager = new OutputManager(ExecutionContext, ActionCommandManager)) + { + StepHost.OutputDataReceived += stdoutManager.OnDataReceived; + StepHost.ErrorDataReceived += stderrManager.OnDataReceived; + + // Execute + int exitCode = await StepHost.ExecuteAsync(workingDirectory: StepHost.ResolvePathForStepHost(workingDirectory), + fileName: fileName, + arguments: arguments, + environment: Environment, + requireExitCodeZero: false, + outputEncoding: null, + killProcessOnCancel: false, + inheritConsoleHandler: !ExecutionContext.Variables.Retain_Default_Encoding, + cancellationToken: ExecutionContext.CancellationToken); + + // Error + if (exitCode != 0) + { + ExecutionContext.Error($"Process completed with exit code {exitCode}."); + ExecutionContext.Result = TaskResult.Failed; + } + } + } + } +} diff --git a/src/Runner.Worker/Handlers/ScriptHandlerHelpers.cs b/src/Runner.Worker/Handlers/ScriptHandlerHelpers.cs new file mode 100644 index 00000000000..8cc5bbf37b6 --- /dev/null +++ b/src/Runner.Worker/Handlers/ScriptHandlerHelpers.cs @@ -0,0 +1,83 @@ + +using System; +using System.Collections.Generic; + +namespace GitHub.Runner.Worker.Handlers +{ + internal class ScriptHandlerHelpers + { + private static readonly Dictionary _defaultArguments = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["cmd"] = "/D /E:ON /V:OFF /S /C \"CALL \"{0}\"\"", + ["pwsh"] = "-command \". '{0}'\"", + ["powershell"] = "-command \". 
'{0}'\"", + ["bash"] = "--noprofile --norc -e -o pipefail {0}", + ["sh"] = "-e {0}", + ["python"] = "{0}" + }; + + private static readonly Dictionary _extensions = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["cmd"] = ".cmd", + ["pwsh"] = ".ps1", + ["powershell"] = ".ps1", + ["bash"] = ".sh", + ["sh"] = ".sh", + ["python"] = ".py" + }; + + internal static string GetScriptArgumentsFormat(string scriptType) + { + if (_defaultArguments.TryGetValue(scriptType, out var argFormat)) + { + return argFormat; + } + return ""; + } + + internal static string GetScriptFileExtension(string scriptType) + { + if (_extensions.TryGetValue(scriptType, out var extension)) + { + return extension; + } + return ""; + } + + internal static string FixUpScriptContents(string scriptType, string contents) + { + switch (scriptType) + { + case "cmd": + // Note, use @echo off instead of using the /Q command line switch. + // When /Q is used, echo can't be turned on. + contents = $"@echo off{Environment.NewLine}{contents}"; + break; + case "powershell": + case "pwsh": + var prepend = "$ErrorActionPreference = 'stop'"; + var append = @"if ((Test-Path -LiteralPath variable:\LASTEXITCODE)) { exit $LASTEXITCODE }"; + contents = $"{prepend}{Environment.NewLine}{contents}{Environment.NewLine}{append}"; + break; + } + return contents; + } + + internal static (string shellCommand, string shellArgs) ParseShellOptionString(string shellOption) + { + var shellStringParts = shellOption.Split(" ", 2); + if (shellStringParts.Length == 2) + { + return (shellCommand: shellStringParts[0], shellArgs: shellStringParts[1]); + } + else if (shellStringParts.Length == 1) + { + return (shellCommand: shellStringParts[0], shellArgs: ""); + } + else + { + throw new ArgumentException($"Failed to parse COMMAND [..ARGS] from {shellOption}"); + } + } + } +} diff --git a/src/Runner.Worker/Handlers/StepHost.cs b/src/Runner.Worker/Handlers/StepHost.cs new file mode 100644 index 00000000000..b368c436043 --- /dev/null +++ b/src/Runner.Worker/Handlers/StepHost.cs @@ -0,0 +1,236 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Channels; +using System.Threading.Tasks; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Worker.Container; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using System.Linq; + +namespace GitHub.Runner.Worker.Handlers +{ + public interface IStepHost : IRunnerService + { + event EventHandler OutputDataReceived; + event EventHandler ErrorDataReceived; + + string ResolvePathForStepHost(string path); + + Task DetermineNodeRuntimeVersion(IExecutionContext executionContext); + + Task ExecuteAsync(string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + bool inheritConsoleHandler, + CancellationToken cancellationToken); + } + + [ServiceLocator(Default = typeof(ContainerStepHost))] + public interface IContainerStepHost : IStepHost + { + ContainerInfo Container { get; set; } + string PrependPath { get; set; } + } + + [ServiceLocator(Default = typeof(DefaultStepHost))] + public interface IDefaultStepHost : IStepHost + { + } + + public sealed class DefaultStepHost : RunnerService, IDefaultStepHost + { + public event EventHandler OutputDataReceived; + public event EventHandler ErrorDataReceived; + + public string 
ResolvePathForStepHost(string path) + { + return path; + } + + public Task DetermineNodeRuntimeVersion(IExecutionContext executionContext) + { + return Task.FromResult("node12"); + } + + public async Task ExecuteAsync(string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + bool inheritConsoleHandler, + CancellationToken cancellationToken) + { + using (var processInvoker = HostContext.CreateService()) + { + processInvoker.OutputDataReceived += OutputDataReceived; + processInvoker.ErrorDataReceived += ErrorDataReceived; + + return await processInvoker.ExecuteAsync(workingDirectory: workingDirectory, + fileName: fileName, + arguments: arguments, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: outputEncoding, + killProcessOnCancel: killProcessOnCancel, + redirectStandardIn: null, + inheritConsoleHandler: inheritConsoleHandler, + cancellationToken: cancellationToken); + } + } + } + + public sealed class ContainerStepHost : RunnerService, IContainerStepHost + { + public ContainerInfo Container { get; set; } + public string PrependPath { get; set; } + public event EventHandler OutputDataReceived; + public event EventHandler ErrorDataReceived; + + public string ResolvePathForStepHost(string path) + { + // make sure container exist. + ArgUtil.NotNull(Container, nameof(Container)); + ArgUtil.NotNullOrEmpty(Container.ContainerId, nameof(Container.ContainerId)); + + // remove double quotes around the path + path = path.Trim('\"'); + + // try to resolve path inside container if the request path is part of the mount volume +#if OS_WINDOWS + if (Container.MountVolumes.Exists(x => path.StartsWith(x.SourceVolumePath, StringComparison.OrdinalIgnoreCase))) +#else + if (Container.MountVolumes.Exists(x => path.StartsWith(x.SourceVolumePath))) +#endif + { + return Container.TranslateToContainerPath(path); + } + else + { + return path; + } + } + + public async Task DetermineNodeRuntimeVersion(IExecutionContext executionContext) + { + // Best effort to determine a compatible node runtime + // There may be more variation in which libraries are linked than just musl/glibc, + // so determine based on known distribtutions instead + var osReleaseIdCmd = "sh -c \"cat /etc/*release | grep ^ID\""; + var dockerManager = HostContext.GetService(); + + var output = new List(); + var execExitCode = await dockerManager.DockerExec(executionContext, Container.ContainerId, string.Empty, osReleaseIdCmd, output); + string nodeExternal; + if (execExitCode == 0) + { + foreach (var line in output) + { + executionContext.Debug(line); + if (line.ToLower().Contains("alpine")) + { + nodeExternal = "node12_alpine"; + executionContext.Output($"Container distribution is alpine. Running JavaScript Action with external tool: {nodeExternal}"); + return nodeExternal; + } + } + } + // Optimistically use the default + nodeExternal = "node12"; + executionContext.Output($"Running JavaScript Action with default external tool: {nodeExternal}"); + return nodeExternal; + } + + public async Task ExecuteAsync(string workingDirectory, + string fileName, + string arguments, + IDictionary environment, + bool requireExitCodeZero, + Encoding outputEncoding, + bool killProcessOnCancel, + bool inheritConsoleHandler, + CancellationToken cancellationToken) + { + // make sure container exist. 
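+            // The step runs via 'docker exec' inside the job container; environment variables are passed
+            // by name only (-e KEY) so their values come from the worker process environment instead of
+            // appearing on the docker command line.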
+ ArgUtil.NotNull(Container, nameof(Container)); + ArgUtil.NotNullOrEmpty(Container.ContainerId, nameof(Container.ContainerId)); + + var dockerManager = HostContext.GetService(); + string dockerClientPath = dockerManager.DockerPath; + + // Usage: docker exec [OPTIONS] CONTAINER COMMAND [ARG...] + IList dockerCommandArgs = new List(); + dockerCommandArgs.Add($"exec"); + + // [OPTIONS] + dockerCommandArgs.Add($"-i"); + dockerCommandArgs.Add($"--workdir {workingDirectory}"); + foreach (var env in environment) + { + // e.g. -e MY_SECRET maps the value into the exec'ed process without exposing + // the value directly in the command + dockerCommandArgs.Add($"-e {env.Key}"); + } + if (!string.IsNullOrEmpty(PrependPath)) + { + // Prepend tool paths to container's PATH + var fullPath = !string.IsNullOrEmpty(Container.ContainerRuntimePath) ? $"{PrependPath}:{Container.ContainerRuntimePath}" : PrependPath; + dockerCommandArgs.Add($"-e PATH=\"{fullPath}\""); + } + + // CONTAINER + dockerCommandArgs.Add($"{Container.ContainerId}"); + + // COMMAND + dockerCommandArgs.Add(fileName); + + // [ARG...] + dockerCommandArgs.Add(arguments); + + string dockerCommandArgstring = string.Join(" ", dockerCommandArgs); + + // make sure all env are using container path + foreach (var envKey in environment.Keys.ToList()) + { + environment[envKey] = this.Container.TranslateToContainerPath(environment[envKey]); + } + + using (var processInvoker = HostContext.CreateService()) + { + processInvoker.OutputDataReceived += OutputDataReceived; + processInvoker.ErrorDataReceived += ErrorDataReceived; + +#if OS_WINDOWS + // It appears that node.exe outputs UTF8 when not in TTY mode. + outputEncoding = Encoding.UTF8; +#else + // Let .NET choose the default. + outputEncoding = null; +#endif + + return await processInvoker.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Work), + fileName: dockerClientPath, + arguments: dockerCommandArgstring, + environment: environment, + requireExitCodeZero: requireExitCodeZero, + outputEncoding: outputEncoding, + killProcessOnCancel: killProcessOnCancel, + redirectStandardIn: null, + inheritConsoleHandler: inheritConsoleHandler, + cancellationToken: cancellationToken); + } + } + } +} diff --git a/src/Runner.Worker/IEnvironmentContextData.cs b/src/Runner.Worker/IEnvironmentContextData.cs new file mode 100644 index 00000000000..b07bb169a9f --- /dev/null +++ b/src/Runner.Worker/IEnvironmentContextData.cs @@ -0,0 +1,7 @@ +using System; +using System.Collections.Generic; + +public interface IEnvironmentContextData +{ + IEnumerable> GetRuntimeEnvironmentVariables(); +} diff --git a/src/Runner.Worker/IssueMatcher.cs b/src/Runner.Worker/IssueMatcher.cs new file mode 100644 index 00000000000..a36ea34788a --- /dev/null +++ b/src/Runner.Worker/IssueMatcher.cs @@ -0,0 +1,445 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; +using System.Text.RegularExpressions; + +namespace GitHub.Runner.Worker +{ + public delegate void OnMatcherChanged(object sender, MatcherChangedEventArgs e); + + public sealed class MatcherChangedEventArgs : EventArgs + { + public MatcherChangedEventArgs(IssueMatcherConfig config) + { + Config = config; + } + + public IssueMatcherConfig Config { get; } + } + + public sealed class IssueMatcher + { + private string _owner; + private IssuePattern[] _patterns; + private IssueMatch[] _state; + + public IssueMatcher(IssueMatcherConfig config, TimeSpan timeout) + { + _owner = config.Owner; + _patterns = 
config.Patterns.Select(x => new IssuePattern(x , timeout)).ToArray(); + Reset(); + } + + public string Owner + { + get + { + if (_owner == null) + { + _owner = String.Empty; + } + + return _owner; + } + } + + public IssueMatch Match(string line) + { + // Single pattern + if (_patterns.Length == 1) + { + var pattern = _patterns[0]; + var regexMatch = pattern.Regex.Match(line); + + if (regexMatch.Success) + { + return new IssueMatch(null, pattern, regexMatch.Groups); + } + + return null; + } + // Multiple patterns + else + { + // Each pattern (iterate in reverse) + for (int i = _patterns.Length - 1; i >= 0; i--) + { + var runningMatch = i > 0 ? _state[i - 1] : null; + + // First pattern or a running match + if (i == 0 || runningMatch != null) + { + var pattern = _patterns[i]; + var isLast = i == _patterns.Length - 1; + var regexMatch = pattern.Regex.Match(line); + + // Matched + if (regexMatch.Success) + { + // Last pattern + if (isLast) + { + // Loop + if (pattern.Loop) + { + // Clear most state, but preserve the running match + Reset(); + _state[i - 1] = runningMatch; + } + // Not loop + else + { + // Clear the state + Reset(); + } + + // Return + return new IssueMatch(runningMatch, pattern, regexMatch.Groups); + } + // Not the last pattern + else + { + // Store the match + _state[i] = new IssueMatch(runningMatch, pattern, regexMatch.Groups); + } + } + // Not matched + else + { + // Last pattern + if (isLast) + { + // Break the running match + _state[i - 1] = null; + } + // Not the last pattern + else + { + // Record not matched + _state[i] = null; + } + } + } + } + + return null; + } + } + + public void Reset() + { + _state = new IssueMatch[_patterns.Length - 1]; + } + } + + public sealed class IssuePattern + { + public IssuePattern(IssuePatternConfig config, TimeSpan timeout) + { + File = config.File; + Line = config.Line; + Column = config.Column; + Severity = config.Severity; + Code = config.Code; + Message = config.Message; + FromPath = config.FromPath; + Loop = config.Loop; + Regex = new Regex(config.Pattern ?? string.Empty, IssuePatternConfig.RegexOptions, timeout); + } + + public int? File { get; } + + public int? Line { get; } + + public int? Column { get; } + + public int? Severity { get; } + + public int? Code { get; } + + public int? Message { get; } + + public int? FromPath { get; } + + public bool Loop { get; } + + public Regex Regex { get; } + } + + public sealed class IssueMatch + { + public IssueMatch(IssueMatch runningMatch, IssuePattern pattern, GroupCollection groups) + { + File = runningMatch?.File ?? GetValue(groups, pattern.File); + Line = runningMatch?.Line ?? GetValue(groups, pattern.Line); + Column = runningMatch?.Column ?? GetValue(groups, pattern.Column); + Severity = runningMatch?.Severity ?? GetValue(groups, pattern.Severity); + Code = runningMatch?.Code ?? GetValue(groups, pattern.Code); + Message = runningMatch?.Message ?? GetValue(groups, pattern.Message); + FromPath = runningMatch?.FromPath ?? GetValue(groups, pattern.FromPath); + } + + public string File { get; } + + public string Line { get; } + + public string Column { get; } + + public string Severity { get; } + + public string Code { get; } + + public string Message { get; } + + public string FromPath { get; } + + private string GetValue(GroupCollection groups, int? 
index) + { + if (index.HasValue && index.Value < groups.Count) + { + var group = groups[index.Value]; + return group.Value; + } + + return null; + } + } + + [DataContract] + public sealed class IssueMatchersConfig + { + [DataMember(Name = "problemMatcher")] + private List _matchers; + + public List Matchers + { + get + { + if (_matchers == null) + { + _matchers = new List(); + } + + return _matchers; + } + + set + { + _matchers = value; + } + } + + public void Validate() + { + var distinctOwners = new HashSet(StringComparer.OrdinalIgnoreCase); + + if (_matchers?.Count > 0) + { + foreach (var matcher in _matchers) + { + matcher.Validate(); + + if (!distinctOwners.Add(matcher.Owner)) + { + // Not localized since this is a programming contract + throw new ArgumentException($"Duplicate owner name '{matcher.Owner}'"); + } + } + } + } + } + + [DataContract] + public sealed class IssueMatcherConfig + { + [DataMember(Name = "owner")] + private string _owner; + + [DataMember(Name = "pattern")] + private IssuePatternConfig[] _patterns; + + public string Owner + { + get + { + if (_owner == null) + { + _owner = String.Empty; + } + + return _owner; + } + + set + { + _owner = value; + } + } + + public IssuePatternConfig[] Patterns + { + get + { + if (_patterns == null) + { + _patterns = new IssuePatternConfig[0]; + } + + return _patterns; + } + + set + { + _patterns = value; + } + } + + public void Validate() + { + // Validate owner + if (string.IsNullOrEmpty(_owner)) + { + throw new ArgumentException("Owner must not be empty"); + } + + // Validate at least one pattern + if (_patterns == null || _patterns.Length == 0) + { + throw new ArgumentException($"Matcher '{_owner}' does not contain any patterns"); + } + + int? file = null; + int? line = null; + int? column = null; + int? severity = null; + int? code = null; + int? message = null; + int? fromPath = null; + + // Validate each pattern config + for (var i = 0; i < _patterns.Length; i++) + { + var isFirst = i == 0; + var isLast = i == _patterns.Length - 1; + var pattern = _patterns[i]; + pattern.Validate(isFirst, + isLast, + ref file, + ref line, + ref column, + ref severity, + ref code, + ref message, + ref fromPath); + } + + if (message == null) + { + throw new ArgumentException($"At least one pattern must set 'message'"); + } + } + } + + [DataContract] + public sealed class IssuePatternConfig + { + private const string _filePropertyName = "file"; + private const string _linePropertyName = "line"; + private const string _columnPropertyName = "column"; + private const string _severityPropertyName = "severity"; + private const string _codePropertyName = "code"; + private const string _messagePropertyName = "message"; + private const string _fromPathPropertyName = "fromPath"; + private const string _loopPropertyName = "loop"; + private const string _regexpPropertyName = "regexp"; + internal static readonly RegexOptions RegexOptions = RegexOptions.CultureInvariant | RegexOptions.ECMAScript; + + [DataMember(Name = _filePropertyName)] + public int? File { get; set; } + + [DataMember(Name = _linePropertyName)] + public int? Line { get; set; } + + [DataMember(Name = _columnPropertyName)] + public int? Column { get; set; } + + [DataMember(Name = _severityPropertyName)] + public int? Severity { get; set; } + + [DataMember(Name = _codePropertyName)] + public int? Code { get; set; } + + [DataMember(Name = _messagePropertyName)] + public int? Message { get; set; } + + [DataMember(Name = _fromPathPropertyName)] + public int? 
FromPath { get; set; } + + [DataMember(Name = _loopPropertyName)] + public bool Loop { get; set; } + + [DataMember(Name = _regexpPropertyName)] + public string Pattern { get; set; } + + public void Validate( + bool isFirst, + bool isLast, + ref int? file, + ref int? line, + ref int? column, + ref int? severity, + ref int? code, + ref int? message, + ref int? fromPath) + { + // Only the last pattern in a multiline matcher may set 'loop' + if (Loop && (isFirst || !isLast)) + { + throw new ArgumentException($"Only the last pattern in a multiline matcher may set '{_loopPropertyName}'"); + } + + if (Loop && Message == null) + { + throw new ArgumentException($"The {_loopPropertyName} pattern must set '{_messagePropertyName}'"); + } + + var regex = new Regex(Pattern ?? string.Empty, RegexOptions); + var groupCount = regex.GetGroupNumbers().Length; + + Validate(_filePropertyName, groupCount, File, ref file); + Validate(_linePropertyName, groupCount, Line, ref line); + Validate(_columnPropertyName, groupCount, Column, ref column); + Validate(_severityPropertyName, groupCount, Severity, ref severity); + Validate(_codePropertyName, groupCount, Code, ref code); + Validate(_messagePropertyName, groupCount, Message, ref message); + Validate(_fromPathPropertyName, groupCount, FromPath, ref fromPath); + } + + private void Validate(string propertyName, int groupCount, int? newValue, ref int? trackedValue) + { + if (newValue == null) + { + return; + } + + // The property '___' is set twice + if (trackedValue != null) + { + throw new ArgumentException($"The property '{propertyName}' is set twice"); + } + + // Out of range + if (newValue.Value < 0 || newValue >= groupCount) + { + throw new ArgumentException($"The property '{propertyName}' is set to {newValue} which is out of range"); + } + + // Record the value + if (newValue != null) + { + trackedValue = newValue; + } + } + } +} diff --git a/src/Runner.Worker/JobContext.cs b/src/Runner.Worker/JobContext.cs new file mode 100644 index 00000000000..05d31ce281b --- /dev/null +++ b/src/Runner.Worker/JobContext.cs @@ -0,0 +1,60 @@ +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Common; + +namespace GitHub.Runner.Worker +{ + public sealed class JobContext : DictionaryContextData + { + public ActionResult? 
Status + { + get + { + if (this.TryGetValue("status", out var status) && status is StringContextData statusString) + { + return EnumUtil.TryParse(statusString); + } + else + { + return null; + } + } + set + { + this["status"] = new StringContextData(value.ToString()); + } + } + + public DictionaryContextData Services + { + get + { + if (this.TryGetValue("services", out var services) && services is DictionaryContextData servicesDictionary) + { + return servicesDictionary; + } + else + { + this["services"] = new DictionaryContextData(); + return this["services"] as DictionaryContextData; + } + } + } + + public DictionaryContextData Container + { + get + { + if (this.TryGetValue("container", out var container) && container is DictionaryContextData containerDictionary) + { + return containerDictionary; + } + else + { + this["container"] = new DictionaryContextData(); + return this["container"] as DictionaryContextData; + } + } + } + } +} \ No newline at end of file diff --git a/src/Runner.Worker/JobExtension.cs b/src/Runner.Worker/JobExtension.cs new file mode 100644 index 00000000000..d57e1c20108 --- /dev/null +++ b/src/Runner.Worker/JobExtension.cs @@ -0,0 +1,399 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Pipelines.ObjectTemplating; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using Pipelines = GitHub.DistributedTask.Pipelines; + +namespace GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(JobExtension))] + + public interface IJobExtension : IRunnerService + { + Task> InitializeJob(IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message); + Task FinalizeJob(IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, DateTime jobStartTimeUtc); + } + + public sealed class JobExtension : RunnerService, IJobExtension + { + private readonly HashSet _existingProcesses = new HashSet(StringComparer.OrdinalIgnoreCase); + private bool _processCleanup; + private string _processLookupId = $"github_{Guid.NewGuid()}"; + + // Download all required actions. + // Make sure all condition inputs are valid. + // Build up three list of steps for jobrunner (pre-job, job, post-job). 
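+        // Pre-job steps cover action download and container startup, job steps are the user-defined
+        // actions, and post-job steps are popped from a stack so cleanup runs in reverse order.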
+ public async Task> InitializeJob(IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message) + { + Trace.Entering(); + ArgUtil.NotNull(jobContext, nameof(jobContext)); + ArgUtil.NotNull(message, nameof(message)); + + // Create a new timeline record for 'Set up job' + IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Set up job", $"{nameof(JobExtension)}_Init", null, null); + + List preJobSteps = new List(); + List jobSteps = new List(); + List postJobSteps = new List(); + using (var register = jobContext.CancellationToken.Register(() => { context.CancelToken(); })) + { + try + { + context.Start(); + context.Debug($"Starting: Set up job"); + context.Output($"Current runner version: '{BuildConstants.RunnerPackage.Version}'"); + var repoFullName = context.GetGitHubContext("repository"); + ArgUtil.NotNull(repoFullName, nameof(repoFullName)); + context.Debug($"Primary repository: {repoFullName}"); + + // Print proxy setting information for better diagnostic experience + var runnerWebProxy = HostContext.GetService(); + if (!string.IsNullOrEmpty(runnerWebProxy.ProxyAddress)) + { + context.Output($"Runner is running behind proxy server: '{runnerWebProxy.ProxyAddress}'"); + } + + // Prepare the workflow directory + context.Output("Prepare workflow directory"); + var directoryManager = HostContext.GetService(); + TrackingConfig trackingConfig = directoryManager.PrepareDirectory( + context, + message.Workspace); + + // Set the directory variables + context.Debug("Update context data"); + string _workDirectory = HostContext.GetDirectory(WellKnownDirectory.Work); + context.SetRunnerContext("workspace", Path.Combine(_workDirectory, trackingConfig.PipelineDirectory)); + context.SetGitHubContext("workspace", Path.Combine(_workDirectory, trackingConfig.WorkspaceDirectory)); + + // Evaluate the job-level environment variables + context.Debug("Evaluating job-level environment variables"); + var templateTrace = context.ToTemplateTraceWriter(); + var schema = new PipelineTemplateSchemaFactory().CreateSchema(); + var templateEvaluator = new PipelineTemplateEvaluator(templateTrace, schema); + foreach (var token in message.EnvironmentVariables) + { + var environmentVariables = templateEvaluator.EvaluateStepEnvironment(token, jobContext.ExpressionValues, VarUtil.EnvironmentVariableKeyComparer); + foreach (var pair in environmentVariables) + { + context.EnvironmentVariables[pair.Key] = pair.Value ?? string.Empty; + context.SetEnvContext(pair.Key, pair.Value ?? 
string.Empty); + } + } + + // Evaluate the job container + context.Debug("Evaluating job container"); + var container = templateEvaluator.EvaluateJobContainer(message.JobContainer, jobContext.ExpressionValues); + if (container != null) + { + jobContext.Container = new Container.ContainerInfo(HostContext, container); + } + + // Evaluate the job service containers + context.Debug("Evaluating job service containers"); + var serviceContainers = templateEvaluator.EvaluateJobServiceContainers(message.JobServiceContainers, jobContext.ExpressionValues); + if (serviceContainers?.Count > 0) + { + foreach (var pair in serviceContainers) + { + var networkAlias = pair.Key; + var serviceContainer = pair.Value; + jobContext.ServiceContainers.Add(new Container.ContainerInfo(HostContext, serviceContainer, false, networkAlias)); + } + } + + // Build up 3 lists of steps, pre-job, job, post-job + var postJobStepsBuilder = new Stack(); + + // Download actions not already in the cache + Trace.Info("Downloading actions"); + var actionManager = HostContext.GetService(); + var prepareSteps = await actionManager.PrepareActionsAsync(context, message.Steps); + preJobSteps.AddRange(prepareSteps); + + // Add start-container steps, record and stop-container steps + if (jobContext.Container != null || jobContext.ServiceContainers.Count > 0) + { + var containerProvider = HostContext.GetService(); + var containers = new List(); + if (jobContext.Container != null) + { + containers.Add(jobContext.Container); + } + containers.AddRange(jobContext.ServiceContainers); + + preJobSteps.Add(new JobExtensionRunner(runAsync: containerProvider.StartContainersAsync, + condition: $"{PipelineTemplateConstants.Success}()", + displayName: "Initialize containers", + data: (object)containers)); + postJobStepsBuilder.Push(new JobExtensionRunner(runAsync: containerProvider.StopContainersAsync, + condition: $"{PipelineTemplateConstants.Always}()", + displayName: "Stop containers", + data: (object)containers)); + } + + // Add action steps + foreach (var step in message.Steps) + { + if (step.Type == Pipelines.StepType.Action) + { + var action = step as Pipelines.ActionStep; + Trace.Info($"Adding {action.DisplayName}."); + var actionRunner = HostContext.CreateService(); + actionRunner.Action = action; + actionRunner.Stage = ActionRunStage.Main; + actionRunner.Condition = step.Condition; + var contextData = new Pipelines.ContextData.DictionaryContextData(); + if (message.ContextData?.Count > 0) + { + foreach (var pair in message.ContextData) + { + contextData[pair.Key] = pair.Value; + } + } + + actionRunner.TryEvaluateDisplayName(contextData, context); + jobSteps.Add(actionRunner); + } + } + + // Create execution context for pre-job steps + foreach (var step in preJobSteps) + { + if (step is JobExtensionRunner) + { + JobExtensionRunner extensionStep = step as JobExtensionRunner; + ArgUtil.NotNull(extensionStep, extensionStep.DisplayName); + Guid stepId = Guid.NewGuid(); + extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, null, null, stepId.ToString("N")); + } + } + + // Create execution context for job steps + foreach (var step in jobSteps) + { + if (step is IActionRunner actionStep) + { + ArgUtil.NotNull(actionStep, step.DisplayName); + actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, actionStep.Action.ScopeName, actionStep.Action.ContextName); + } + } + + // Add post-job steps + Trace.Info("Adding post-job steps"); + while 
(postJobStepsBuilder.Count > 0) + { + postJobSteps.Add(postJobStepsBuilder.Pop()); + } + + // Create execution context for post-job steps + foreach (var step in postJobSteps) + { + if (step is JobExtensionRunner) + { + JobExtensionRunner extensionStep = step as JobExtensionRunner; + ArgUtil.NotNull(extensionStep, extensionStep.DisplayName); + Guid stepId = Guid.NewGuid(); + extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, stepId.ToString("N"), null, null); + } + } + + List steps = new List(); + steps.AddRange(preJobSteps); + steps.AddRange(jobSteps); + steps.AddRange(postJobSteps); + + // Start agent log plugin host process + // var logPlugin = HostContext.GetService(); + // await logPlugin.StartAsync(context, steps, jobContext.CancellationToken); + + // Prepare for orphan process cleanup + _processCleanup = jobContext.Variables.GetBoolean("process.clean") ?? true; + if (_processCleanup) + { + // Set the RUNNER_TRACKING_ID env variable. + Environment.SetEnvironmentVariable(Constants.ProcessTrackingId, _processLookupId); + context.Debug("Collect running processes for tracking orphan processes."); + + // Take a snapshot of current running processes + Dictionary processes = SnapshotProcesses(); + foreach (var proc in processes) + { + // Pid_ProcessName + _existingProcesses.Add($"{proc.Key}_{proc.Value.ProcessName}"); + } + } + + return steps; + } + catch (OperationCanceledException ex) when (jobContext.CancellationToken.IsCancellationRequested) + { + // Log the exception and cancel the JobExtension Initialization. + Trace.Error($"Caught cancellation exception from JobExtension Initialization: {ex}"); + context.Error(ex); + context.Result = TaskResult.Canceled; + throw; + } + catch (Exception ex) + { + // Log the error and fail the JobExtension Initialization. + Trace.Error($"Caught exception from JobExtension Initialization: {ex}"); + context.Error(ex); + context.Result = TaskResult.Failed; + throw; + } + finally + { + context.Debug("Finishing: Set up job"); + context.Complete(); + } + } + } + + public async Task FinalizeJob(IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, DateTime jobStartTimeUtc) + { + Trace.Entering(); + ArgUtil.NotNull(jobContext, nameof(jobContext)); + + // create a new timeline record node for 'Finalize job' + IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Complete job", $"{nameof(JobExtension)}_Final", null, null); + using (var register = jobContext.CancellationToken.Register(() => { context.CancelToken(); })) + { + try + { + context.Start(); + context.Debug("Starting: Complete job"); + + // Wait for agent log plugin process exits + // var logPlugin = HostContext.GetService(); + // try + // { + // await logPlugin.WaitAsync(context); + // } + // catch (Exception ex) + // { + // // Log and ignore the error from log plugin finalization. + // Trace.Error($"Caught exception from log plugin finalization: {ex}"); + // context.Output(ex.Message); + // } + + if (context.Variables.GetBoolean(Constants.Variables.Actions.RunnerDebug) ?? 
false) + { + Trace.Info("Support log upload starting."); + context.Output("Uploading runner diagnostic logs"); + + IDiagnosticLogManager diagnosticLogManager = HostContext.GetService(); + + try + { + await diagnosticLogManager.UploadDiagnosticLogsAsync(executionContext: context, parentContext: jobContext, message: message, jobStartTimeUtc: jobStartTimeUtc); + + Trace.Info("Support log upload complete."); + context.Output("Completed runner diagnostic log upload"); + } + catch (Exception ex) + { + // Log the error but make sure we continue gracefully. + Trace.Info("Error uploading support logs."); + context.Output("Error uploading runner diagnostic logs"); + Trace.Error(ex); + } + } + + if (_processCleanup) + { + context.Output("Cleaning up orphan processes"); + + // Only check environment variable for any process that doesn't run before we invoke our process. + Dictionary currentProcesses = SnapshotProcesses(); + foreach (var proc in currentProcesses) + { + if (proc.Key == Process.GetCurrentProcess().Id) + { + // skip for current process. + continue; + } + + if (_existingProcesses.Contains($"{proc.Key}_{proc.Value.ProcessName}")) + { + Trace.Verbose($"Skip existing process. PID: {proc.Key} ({proc.Value.ProcessName})"); + } + else + { + Trace.Info($"Inspecting process environment variables. PID: {proc.Key} ({proc.Value.ProcessName})"); + + string lookupId = null; + try + { + lookupId = proc.Value.GetEnvironmentVariable(HostContext, Constants.ProcessTrackingId); + } + catch (Exception ex) + { + Trace.Warning($"Ignore exception during read process environment variables: {ex.Message}"); + Trace.Verbose(ex.ToString()); + } + + if (string.Equals(lookupId, _processLookupId, StringComparison.OrdinalIgnoreCase)) + { + context.Output($"Terminate orphan process: pid ({proc.Key}) ({proc.Value.ProcessName})"); + try + { + proc.Value.Kill(); + } + catch (Exception ex) + { + Trace.Error("Catch exception during orphan process cleanup."); + Trace.Error(ex); + } + } + } + } + } + } + catch (Exception ex) + { + // Log and ignore the error from JobExtension finalization. + Trace.Error($"Caught exception from JobExtension finalization: {ex}"); + context.Output(ex.Message); + } + finally + { + context.Debug("Finishing: Complete job"); + context.Complete(); + } + } + } + + private Dictionary SnapshotProcesses() + { + Dictionary snapshot = new Dictionary(); + foreach (var proc in Process.GetProcesses()) + { + try + { + // On Windows, this will throw exception on error. + // On Linux, this will be NULL on error. 
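+                    // Reading ProcessName doubles as an accessibility check; processes that throw (or
+                    // return an empty name) are skipped, with the exception logged at verbose level below.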
+ if (!string.IsNullOrEmpty(proc.ProcessName)) + { + snapshot[proc.Id] = proc; + } + } + catch (Exception ex) + { + Trace.Verbose($"Ignore any exception during taking process snapshot of process pid={proc.Id}: '{ex.Message}'."); + } + } + + Trace.Info($"Total accessible running process: {snapshot.Count}."); + return snapshot; + } + } +} diff --git a/src/Runner.Worker/JobExtensionRunner.cs b/src/Runner.Worker/JobExtensionRunner.cs new file mode 100644 index 00000000000..48c2c2358e3 --- /dev/null +++ b/src/Runner.Worker/JobExtensionRunner.cs @@ -0,0 +1,37 @@ +using System; +using System.Threading.Tasks; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.Runner.Worker +{ + public sealed class JobExtensionRunner : IStep + { + private readonly object _data; + private readonly Func _runAsync; + + public JobExtensionRunner( + Func runAsync, + string condition, + string displayName, + object data) + { + _runAsync = runAsync; + Condition = condition; + DisplayName = displayName; + _data = data; + } + + public string Condition { get; set; } + public TemplateToken ContinueOnError => new BooleanToken(null, null, null, false); + public string DisplayName { get; set; } + public IExecutionContext ExecutionContext { get; set; } + public TemplateToken Timeout => new NumberToken(null, null, null, 0); + public object Data => _data; + + public async Task RunAsync() + { + await _runAsync(ExecutionContext, _data); + } + } +} diff --git a/src/Runner.Worker/JobRunner.cs b/src/Runner.Worker/JobRunner.cs new file mode 100644 index 00000000000..58f283f6ab1 --- /dev/null +++ b/src/Runner.Worker/JobRunner.cs @@ -0,0 +1,292 @@ +using GitHub.DistributedTask.WebApi; +using Pipelines = GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common.Util; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using System.Net.Http; +using System.Text; +using System.IO.Compression; +using System.Diagnostics; +using Newtonsoft.Json.Linq; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.ObjectTemplating; + +namespace GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(JobRunner))] + public interface IJobRunner : IRunnerService + { + Task RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken); + } + + public sealed class JobRunner : RunnerService, IJobRunner + { + private IJobServerQueue _jobServerQueue; + private ITempDirectoryManager _tempDirectoryManager; + + public async Task RunAsync(Pipelines.AgentJobRequestMessage message, CancellationToken jobRequestCancellationToken) + { + // Validate parameters. + Trace.Entering(); + ArgUtil.NotNull(message, nameof(message)); + ArgUtil.NotNull(message.Resources, nameof(message.Resources)); + ArgUtil.NotNull(message.Variables, nameof(message.Variables)); + ArgUtil.NotNull(message.Steps, nameof(message.Steps)); + Trace.Info("Job ID {0}", message.JobId); + + DateTime jobStartTimeUtc = DateTime.UtcNow; + + ServiceEndpoint systemConnection = message.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); + + // Setup the job server and job server queue. 
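+ // The queue is started before any step runs and is drained via ShutdownAsync in CompleteJobAsync below; + // a failure to drain it is treated as a job failure because output variables are published through the queue.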
+ var jobServer = HostContext.GetService(); + VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection); + Uri jobServerUrl = systemConnection.Url; + + Trace.Info($"Creating job server with URL: {jobServerUrl}"); + // jobServerQueue is the throttling reporter. + _jobServerQueue = HostContext.GetService(); + VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, new DelegatingHandler[] { new ThrottlingReportHandler(_jobServerQueue) }); + await jobServer.ConnectAsync(jobConnection); + + _jobServerQueue.Start(message); + HostContext.WritePerfCounter($"WorkerJobServerQueueStarted_{message.RequestId.ToString()}"); + + IExecutionContext jobContext = null; + CancellationTokenRegistration? runnerShutdownRegistration = null; + try + { + // Create the job execution context. + jobContext = HostContext.CreateService(); + jobContext.InitializeJob(message, jobRequestCancellationToken); + Trace.Info("Starting the job execution context."); + jobContext.Start(); + jobContext.Debug($"Starting: {message.JobDisplayName}"); + + runnerShutdownRegistration = HostContext.RunnerShutdownToken.Register(() => + { + // log an issue, then runner get shutdown by Ctrl-C or Ctrl-Break. + // the server will use Ctrl-Break to tells the runner that operating system is shutting down. + string errorMessage; + switch (HostContext.RunnerShutdownReason) + { + case ShutdownReason.UserCancelled: + errorMessage = "The runner has received a shutdown signal. This can happen when the runner service is stopped, or a manually started runner is canceled."; + break; + case ShutdownReason.OperatingSystemShutdown: + errorMessage = $"Operating system is shutting down for computer '{Environment.MachineName}'"; + break; + default: + throw new ArgumentException(HostContext.RunnerShutdownReason.ToString(), nameof(HostContext.RunnerShutdownReason)); + } + jobContext.AddIssue(new Issue() { Type = IssueType.Error, Message = errorMessage }); + }); + + // Validate directory permissions. + string workDirectory = HostContext.GetDirectory(WellKnownDirectory.Work); + Trace.Info($"Validating directory permissions for: '{workDirectory}'"); + try + { + Directory.CreateDirectory(workDirectory); + IOUtil.ValidateExecutePermission(workDirectory); + } + catch (Exception ex) + { + Trace.Error(ex); + jobContext.Error(ex); + return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Failed); + } + + jobContext.SetRunnerContext("os", VarUtil.OS); + + string toolsDirectory = HostContext.GetDirectory(WellKnownDirectory.Tools); + Directory.CreateDirectory(toolsDirectory); + jobContext.SetRunnerContext("tool_cache", toolsDirectory); + + // remove variable from env + Environment.SetEnvironmentVariable("AGENT_TOOLSDIRECTORY", null); + Environment.SetEnvironmentVariable(Constants.Variables.Agent.ToolsDirectory, null); + + // Setup TEMP directories + _tempDirectoryManager = HostContext.GetService(); + _tempDirectoryManager.InitializeTempDirectory(jobContext); + + // // Expand container properties + // jobContext.Container?.ExpandProperties(jobContext.Variables); + // foreach (var sidecar in jobContext.SidecarContainers) + // { + // sidecar.ExpandProperties(jobContext.Variables); + // } + + // Get the job extension. + Trace.Info("Getting job extension."); + IJobExtension jobExtension = HostContext.CreateService(); + List jobSteps = null; + try + { + Trace.Info("Initialize job. 
Getting all job steps."); + jobSteps = await jobExtension.InitializeJob(jobContext, message); + } + catch (OperationCanceledException ex) when (jobContext.CancellationToken.IsCancellationRequested) + { + // set the job to canceled + // don't log error issue to job ExecutionContext, since server owns the job level issue + Trace.Error($"Job is canceled during initialize."); + Trace.Error($"Caught exception: {ex}"); + return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Canceled); + } + catch (Exception ex) + { + // set the job to failed. + // don't log error issue to job ExecutionContext, since server owns the job level issue + Trace.Error($"Job initialize failed."); + Trace.Error($"Caught exception from {nameof(jobExtension.InitializeJob)}: {ex}"); + return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Failed); + } + + // trace out all steps + Trace.Info($"Total job steps: {jobSteps.Count}."); + Trace.Verbose($"Job steps: '{string.Join(", ", jobSteps.Select(x => x.DisplayName))}'"); + HostContext.WritePerfCounter($"WorkerJobInitialized_{message.RequestId.ToString()}"); + + // Run all job steps + Trace.Info("Run all job steps."); + var stepsRunner = HostContext.GetService(); + try + { + foreach (var step in jobSteps) + { + jobContext.JobSteps.Enqueue(step); + } + + await stepsRunner.RunAsync(jobContext); + } + catch (Exception ex) + { + // StepRunner should never throw exception out. + // End up here mean there is a bug in StepRunner + // Log the error and fail the job. + Trace.Error($"Caught exception from job steps {nameof(StepsRunner)}: {ex}"); + jobContext.Error(ex); + return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Failed); + } + finally + { + Trace.Info("Finalize job."); + await jobExtension.FinalizeJob(jobContext, message, jobStartTimeUtc); + } + + Trace.Info($"Job result after all job steps finish: {jobContext.Result ?? TaskResult.Succeeded}"); + + Trace.Info("Completing the job execution context."); + return await CompleteJobAsync(jobServer, jobContext, message); + } + finally + { + if (runnerShutdownRegistration != null) + { + runnerShutdownRegistration.Value.Dispose(); + runnerShutdownRegistration = null; + } + + await ShutdownQueue(throwOnFailure: false); + } + } + + private async Task CompleteJobAsync(IJobServer jobServer, IExecutionContext jobContext, Pipelines.AgentJobRequestMessage message, TaskResult? taskResult = null) + { + jobContext.Debug($"Finishing: {message.JobDisplayName}"); + TaskResult result = jobContext.Complete(taskResult); + + try + { + await ShutdownQueue(throwOnFailure: true); + } + catch (Exception ex) + { + Trace.Error($"Caught exception from {nameof(JobServerQueue)}.{nameof(_jobServerQueue.ShutdownAsync)}"); + Trace.Error("This indicate a failure during publish output variables. Fail the job to prevent unexpected job outputs."); + Trace.Error(ex); + result = TaskResultUtil.MergeTaskResults(result, TaskResult.Failed); + } + + // Clean TEMP after finish process jobserverqueue, since there might be a pending fileupload still use the TEMP dir. 
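+ // Cleanup is best effort: TempDirectoryManager only logs deletion failures, so a locked temp file cannot fail an otherwise completed job.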
+ _tempDirectoryManager?.CleanupTempDirectory(); + + if (!jobContext.Features.HasFlag(PlanFeatures.JobCompletedPlanEvent)) + { + Trace.Info($"Skip raise job completed event call from worker because Plan version is {message.Plan.Version}"); + return result; + } + + Trace.Info("Raising job completed event."); + var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result); + + var completeJobRetryLimit = 5; + var exceptions = new List(); + while (completeJobRetryLimit-- > 0) + { + try + { + await jobServer.RaisePlanEventAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, jobCompletedEvent, default(CancellationToken)); + return result; + } + catch (TaskOrchestrationPlanNotFoundException ex) + { + Trace.Error($"TaskOrchestrationPlanNotFoundException received, while attempting to raise JobCompletedEvent for job {message.JobId}."); + Trace.Error(ex); + return TaskResult.Failed; + } + catch (TaskOrchestrationPlanSecurityException ex) + { + Trace.Error($"TaskOrchestrationPlanSecurityException received, while attempting to raise JobCompletedEvent for job {message.JobId}."); + Trace.Error(ex); + return TaskResult.Failed; + } + catch (Exception ex) + { + Trace.Error($"Catch exception while attempting to raise JobCompletedEvent for job {message.JobId}, job request {message.RequestId}."); + Trace.Error(ex); + exceptions.Add(ex); + } + + // delay 5 seconds before next retry. + await Task.Delay(TimeSpan.FromSeconds(5)); + } + + // rethrow exceptions from all attempts. + throw new AggregateException(exceptions); + } + + private async Task ShutdownQueue(bool throwOnFailure) + { + if (_jobServerQueue != null) + { + try + { + Trace.Info("Shutting down the job server queue."); + await _jobServerQueue.ShutdownAsync(); + } + catch (Exception ex) when (!throwOnFailure) + { + Trace.Error($"Caught exception from {nameof(JobServerQueue)}.{nameof(_jobServerQueue.ShutdownAsync)}"); + Trace.Error(ex); + } + finally + { + _jobServerQueue = null; // Prevent multiple attempts. + } + } + } + } +} diff --git a/src/Runner.Worker/PipelineDirectoryManager.cs b/src/Runner.Worker/PipelineDirectoryManager.cs new file mode 100644 index 00000000000..4e5819da277 --- /dev/null +++ b/src/Runner.Worker/PipelineDirectoryManager.cs @@ -0,0 +1,211 @@ +using System; +using System.IO; +using System.Linq; +using GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(PipelineDirectoryManager))] + public interface IPipelineDirectoryManager : IRunnerService + { + TrackingConfig PrepareDirectory( + IExecutionContext executionContext, + WorkspaceOptions workspace); + + TrackingConfig UpdateRepositoryDirectory( + IExecutionContext executionContext, + string repositoryFullName, + string repositoryPath, + bool workspaceRepository); + } + + public sealed class PipelineDirectoryManager : RunnerService, IPipelineDirectoryManager + { + public TrackingConfig PrepareDirectory( + IExecutionContext executionContext, + WorkspaceOptions workspace) + { + // Validate parameters. + Trace.Entering(); + ArgUtil.NotNull(executionContext, nameof(executionContext)); + var trackingManager = HostContext.GetService(); + + var repoFullName = executionContext.GetGitHubContext("repository"); + ArgUtil.NotNullOrEmpty(repoFullName, nameof(repoFullName)); + + // Load the existing tracking file if one already exists. 
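+ // The tracking file lives at <Work>/<PipelineMappingDirectory>/<owner>/<repo>/<TrackingConfigFile> and records + // the pipeline/workspace directories plus the last-run time for each repository (see TrackingConfig).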
+ string trackingFile = Path.Combine( + HostContext.GetDirectory(WellKnownDirectory.Work), + Constants.Pipeline.Path.PipelineMappingDirectory, + repoFullName, + Constants.Pipeline.Path.TrackingConfigFile); + Trace.Info($"Loading tracking config if exists: {trackingFile}"); + TrackingConfig trackingConfig = trackingManager.LoadIfExists(executionContext, trackingFile); + + // Create a new tracking config if required. + if (trackingConfig == null) + { + Trace.Info("Creating a new tracking config file."); + trackingConfig = trackingManager.Create( + executionContext, + trackingFile); + ArgUtil.NotNull(trackingConfig, nameof(trackingConfig)); + } + else + { + // For existing tracking config files, update the job run properties. + Trace.Info("Updating job run properties."); + trackingConfig.LastRunOn = DateTimeOffset.Now; + trackingManager.Update(executionContext, trackingConfig, trackingFile); + } + + // Prepare the pipeline directory. + if (string.Equals(workspace?.Clean, PipelineConstants.WorkspaceCleanOptions.All, StringComparison.OrdinalIgnoreCase)) + { + CreateDirectory( + executionContext, + description: "pipeline directory", + path: Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), trackingConfig.PipelineDirectory), + deleteExisting: true); + + CreateDirectory( + executionContext, + description: "workspace directory", + path: Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), trackingConfig.WorkspaceDirectory), + deleteExisting: true); + } + else if (string.Equals(workspace?.Clean, PipelineConstants.WorkspaceCleanOptions.Resources, StringComparison.OrdinalIgnoreCase)) + { + foreach (var repository in trackingConfig.Repositories) + { + CreateDirectory( + executionContext, + description: $"directory {repository.Value.RepositoryPath} for repository {repository.Key}", + path: Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), repository.Value.RepositoryPath), + deleteExisting: true); + } + } + else if (string.Equals(workspace?.Clean, PipelineConstants.WorkspaceCleanOptions.Outputs, StringComparison.OrdinalIgnoreCase)) + { + var allDirectories = Directory.GetDirectories(Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), trackingConfig.PipelineDirectory)).ToList(); + foreach (var repository in trackingConfig.Repositories) + { + allDirectories.Remove(Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), repository.Value.RepositoryPath)); + } + + foreach (var deleteDir in allDirectories) + { + executionContext.Debug($"Delete existing untracked directory '{deleteDir}'"); + DeleteDirectory(executionContext, "untracked dir", deleteDir); + } + } + else + { + CreateDirectory( + executionContext, + description: "pipeline directory", + path: Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), trackingConfig.PipelineDirectory), + deleteExisting: false); + + CreateDirectory( + executionContext, + description: "workspace directory", + path: Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), trackingConfig.WorkspaceDirectory), + deleteExisting: false); + } + + return trackingConfig; + } + + public TrackingConfig UpdateRepositoryDirectory( + IExecutionContext executionContext, + string repositoryFullName, + string repositoryPath, + bool workspaceRepository) + { + // Validate parameters. 
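+ // Records where a repository has been placed on disk: the path must sit under the pipeline directory and is stored + // relative to the work directory; when workspaceRepository is true, the github.workspace context is repointed to it.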
+ Trace.Entering(); + ArgUtil.NotNull(executionContext, nameof(executionContext)); + ArgUtil.NotNullOrEmpty(repositoryFullName, nameof(repositoryFullName)); + ArgUtil.NotNullOrEmpty(repositoryPath, nameof(repositoryPath)); + + // we need the repository for the pipeline, since the tracking file is based on the workflow repository + var pipelineRepoFullName = executionContext.GetGitHubContext("repository"); + ArgUtil.NotNullOrEmpty(pipelineRepoFullName, nameof(pipelineRepoFullName)); + + // Load the existing tracking file. + string trackingFile = Path.Combine( + HostContext.GetDirectory(WellKnownDirectory.Work), + Constants.Pipeline.Path.PipelineMappingDirectory, + pipelineRepoFullName, + Constants.Pipeline.Path.TrackingConfigFile); + + Trace.Verbose($"Loading tracking config if exists: {trackingFile}"); + var trackingManager = HostContext.GetService(); + TrackingConfig existingConfig = trackingManager.LoadIfExists(executionContext, trackingFile); + ArgUtil.NotNull(existingConfig, nameof(existingConfig)); + + Trace.Info($"Update repository {repositoryFullName}'s path to '{repositoryPath}'"); + string pipelineDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), existingConfig.PipelineDirectory); + if (repositoryPath.StartsWith(pipelineDirectory + Path.DirectorySeparatorChar) || repositoryPath.StartsWith(pipelineDirectory + Path.AltDirectorySeparatorChar)) + { + // The workspaceDirectory in tracking file is a relative path to runner's pipeline directory. + var repositoryRelativePath = repositoryPath.Substring(HostContext.GetDirectory(WellKnownDirectory.Work).Length + 1).TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + if (!existingConfig.Repositories.ContainsKey(repositoryFullName)) + { + existingConfig.Repositories[repositoryFullName] = new RepositoryTrackingConfig(); + } + + existingConfig.Repositories[repositoryFullName].RepositoryPath = repositoryRelativePath; + existingConfig.Repositories[repositoryFullName].LastRunOn = DateTimeOffset.Now; + + if (workspaceRepository) + { + Trace.Info($"Update workspace to '{repositoryPath}'"); + existingConfig.WorkspaceDirectory = repositoryRelativePath; + executionContext.SetGitHubContext("workspace", repositoryPath); + } + + // Update the tracking config files. + Trace.Info("Updating repository tracking."); + trackingManager.Update(executionContext, existingConfig, trackingFile); + + return existingConfig; + } + else + { + throw new ArgumentException($"Repository path '{repositoryPath}' should be located under runner's pipeline directory '{pipelineDirectory}'."); + } + } + + private void CreateDirectory(IExecutionContext executionContext, string description, string path, bool deleteExisting) + { + // Delete. + if (deleteExisting) + { + executionContext.Debug($"Delete existing {description}: '{path}'"); + DeleteDirectory(executionContext, description, path); + } + + // Create. 
+ if (!Directory.Exists(path)) + { + executionContext.Debug($"Creating {description}: '{path}'"); + Trace.Info($"Creating {description}."); + Directory.CreateDirectory(path); + } + } + + private void DeleteDirectory(IExecutionContext executionContext, string description, string path) + { + Trace.Info($"Checking if {description} exists: '{path}'"); + if (Directory.Exists(path)) + { + executionContext.Debug($"Deleting {description}: '{path}'"); + IOUtil.DeleteDirectory(path, executionContext.CancellationToken); + } + } + } +} diff --git a/src/Runner.Worker/Program.cs b/src/Runner.Worker/Program.cs new file mode 100644 index 00000000000..61ab3eeb00d --- /dev/null +++ b/src/Runner.Worker/Program.cs @@ -0,0 +1,68 @@ +using GitHub.Runner.Common.Util; +using System; +using System.Globalization; +using System.Threading.Tasks; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker +{ + public static class Program + { + public static int Main(string[] args) + { + using (HostContext context = new HostContext("Worker")) + { + return MainAsync(context, args).GetAwaiter().GetResult(); + } + } + + public static async Task<int> MainAsync(IHostContext context, string[] args) + { + // We may want to consider registering this handler in Worker.cs, similar to the unloading/SIGTERM handler + // ITerminal registers a CTRL-C handler, which keeps the Runner.Worker process running + // and lets the Runner.Listener handle the exit gracefully. + var term = context.GetService<ITerminal>(); + Tracing trace = context.GetTrace(nameof(GitHub.Runner.Worker)); + try + { + trace.Info($"Version: {BuildConstants.RunnerPackage.Version}"); + trace.Info($"Commit: {BuildConstants.Source.CommitHash}"); + trace.Info($"Culture: {CultureInfo.CurrentCulture.Name}"); + trace.Info($"UI Culture: {CultureInfo.CurrentUICulture.Name}"); + context.WritePerfCounter("WorkerProcessStarted"); + + // Validate args. + ArgUtil.NotNull(args, nameof(args)); + ArgUtil.Equal(3, args.Length, nameof(args.Length)); + ArgUtil.NotNullOrEmpty(args[0], $"{nameof(args)}[0]"); + ArgUtil.Equal("spawnclient", args[0].ToLowerInvariant(), $"{nameof(args)}[0]"); + ArgUtil.NotNullOrEmpty(args[1], $"{nameof(args)}[1]"); + ArgUtil.NotNullOrEmpty(args[2], $"{nameof(args)}[2]"); + var worker = context.GetService<IWorker>(); + + // Run the worker. + return await worker.RunAsync( + pipeIn: args[1], + pipeOut: args[2]); + } + catch (Exception ex) + { + // Propagate any exception that causes a worker failure back to the runner. + Console.WriteLine(ex.ToString()); + try + { + trace.Error(ex); + } + catch (Exception e) + { + // Make sure we don't crash the app on a trace error, + // since an IOException will be thrown when we run out of disk space.
+ Console.WriteLine(e.ToString()); + } + } + + return 1; + } + } +} diff --git a/src/Runner.Worker/Runner.Worker.csproj b/src/Runner.Worker/Runner.Worker.csproj new file mode 100644 index 00000000000..d2543cf5e1a --- /dev/null +++ b/src/Runner.Worker/Runner.Worker.csproj @@ -0,0 +1,74 @@ + + + + netcoreapp2.2 + Exe + win-x64;win-x86;linux-x64;linux-arm;rhel.6-x64;osx-x64 + true + portable-net45+win8 + NU1701;NU1603 + $(Version) + + + + + + + + + + + + + + + + + + GitHub.Runner.Worker.action_yaml.json + + + + + portable + + + + OS_WINDOWS;X64;TRACE + + + OS_WINDOWS;X86;TRACE + + + OS_WINDOWS;X64;DEBUG;TRACE + + + OS_WINDOWS;X86;DEBUG;TRACE + + + + OS_OSX;X64;TRACE + + + OS_OSX;DEBUG;X64;TRACE + + + + OS_LINUX;X64;TRACE + + + OS_LINUX;OS_RHEL6;X64;TRACE + + + OS_LINUX;ARM;TRACE + + + OS_LINUX;X64;DEBUG;TRACE + + + OS_LINUX;OS_RHEL6;X64;DEBUG;TRACE + + + OS_LINUX;ARM;DEBUG;TRACE + + diff --git a/src/Runner.Worker/RunnerContext.cs b/src/Runner.Worker/RunnerContext.cs new file mode 100644 index 00000000000..904caf6ecfa --- /dev/null +++ b/src/Runner.Worker/RunnerContext.cs @@ -0,0 +1,17 @@ +using GitHub.DistributedTask.Pipelines.ContextData; +using System; +using System.Collections.Generic; + +namespace GitHub.Runner.Worker +{ + public sealed class RunnerContext : DictionaryContextData, IEnvironmentContextData + { + public IEnumerable> GetRuntimeEnvironmentVariables() + { + foreach (var data in this) + { + yield return new KeyValuePair($"RUNNER_{data.Key.ToUpperInvariant()}", data.Value as StringContextData); + } + } + } +} \ No newline at end of file diff --git a/src/Runner.Worker/RunnerPluginManager.cs b/src/Runner.Worker/RunnerPluginManager.cs new file mode 100644 index 00000000000..1157a176287 --- /dev/null +++ b/src/Runner.Worker/RunnerPluginManager.cs @@ -0,0 +1,149 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Sdk; +using GitHub.Runner.Common.Util; +using GitHub.Services.WebApi; +using System; +using System.Collections.Generic; +using System.IO; +using System.Reflection; +using System.Runtime.Loader; +using System.Threading; +using System.Threading.Tasks; +using System.Text; +using GitHub.DistributedTask.Pipelines.ContextData; +using System.Threading.Channels; +using GitHub.Runner.Common; +using System.Linq; + +namespace GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(RunnerPluginManager))] + public interface IRunnerPluginManager : IRunnerService + { + RunnerPluginActionInfo GetPluginAction(string plugin); + Task RunPluginActionAsync(IExecutionContext context, string plugin, Dictionary inputs, Dictionary environment, Variables runtimeVariables, EventHandler outputHandler); + } + + public sealed class RunnerPluginManager : RunnerService, IRunnerPluginManager + { + private readonly Dictionary _actionPlugins = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + { + "checkout", + new RunnerPluginActionInfo() + { + Description = "Get sources from a Git repository", + FriendlyName = "Get sources", + PluginTypeName = "GitHub.Runner.Plugins.Repository.v1_0.CheckoutTask, Runner.Plugins" + } + }, + { + "checkoutV1_1", + new RunnerPluginActionInfo() + { + Description = "Get sources from a Git repository", + FriendlyName = "Get sources", + PluginTypeName = "GitHub.Runner.Plugins.Repository.v1_1.CheckoutTask, Runner.Plugins", + PostPluginTypeName = "GitHub.Runner.Plugins.Repository.v1_1.CleanupTask, Runner.Plugins" + } + }, + { + "publish", + new RunnerPluginActionInfo() + { + PluginTypeName = "GitHub.Runner.Plugins.Artifact.PublishArtifact, Runner.Plugins" + } + }, + { 
+ "download", + new RunnerPluginActionInfo() + { + PluginTypeName = "GitHub.Runner.Plugins.Artifact.DownloadArtifact, Runner.Plugins" + } + } + }; + + public RunnerPluginActionInfo GetPluginAction(string plugin) + { + if (_actionPlugins.ContainsKey(plugin)) + { + return _actionPlugins[plugin]; + } + else + { + return null; + } + } + + public async Task RunPluginActionAsync(IExecutionContext context, string plugin, Dictionary inputs, Dictionary environment, Variables runtimeVariables, EventHandler outputHandler) + { + ArgUtil.NotNullOrEmpty(plugin, nameof(plugin)); + + // Only allow plugins we defined + if (!_actionPlugins.Any(x => x.Value.PluginTypeName == plugin || x.Value.PostPluginTypeName == plugin)) + { + throw new NotSupportedException(plugin); + } + + // Resolve the working directory. + string workingDirectory = HostContext.GetDirectory(WellKnownDirectory.Work); + ArgUtil.Directory(workingDirectory, nameof(workingDirectory)); + + // Runner.PluginHost + string file = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), $"Runner.PluginHost{IOUtil.ExeExtension}"); + ArgUtil.File(file, $"Runner.PluginHost{IOUtil.ExeExtension}"); + + // Runner.PluginHost's arguments + string arguments = $"action \"{plugin}\""; + + // construct plugin context + RunnerActionPluginExecutionContext pluginContext = new RunnerActionPluginExecutionContext + { + Inputs = inputs, + Endpoints = context.Endpoints, + Context = context.ExpressionValues + }; + + // variables + foreach (var variable in context.Variables.AllVariables) + { + pluginContext.Variables[variable.Name] = new VariableValue(variable.Value, variable.Secret); + } + + using (var processInvoker = HostContext.CreateService()) + { + var redirectStandardIn = Channel.CreateUnbounded(new UnboundedChannelOptions() { SingleReader = true, SingleWriter = true }); + redirectStandardIn.Writer.TryWrite(JsonUtility.ToString(pluginContext)); + + processInvoker.OutputDataReceived += outputHandler; + processInvoker.ErrorDataReceived += outputHandler; + + // Execute the process. Exit code 0 should always be returned. + // A non-zero exit code indicates infrastructural failure. + // Task failure should be communicated over STDOUT using ## commands. 
+ await processInvoker.ExecuteAsync(workingDirectory: workingDirectory, + fileName: file, + arguments: arguments, + environment: environment, + requireExitCodeZero: true, + outputEncoding: Encoding.UTF8, + killProcessOnCancel: false, + redirectStandardIn: redirectStandardIn, + cancellationToken: context.CancellationToken); + } + } + private Assembly ResolveAssembly(AssemblyLoadContext context, AssemblyName assembly) + { + string assemblyFilename = assembly.Name + ".dll"; + return context.LoadFromAssemblyPath(Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), assemblyFilename)); + } + } + + public class RunnerPluginActionInfo + { + public string Description { get; set; } + public string FriendlyName { get; set; } + public string PluginTypeName { get; set; } + public string PostPluginTypeName { get; set; } + } +} diff --git a/src/Runner.Worker/StepsContext.cs b/src/Runner.Worker/StepsContext.cs new file mode 100644 index 00000000000..41ea72961d2 --- /dev/null +++ b/src/Runner.Worker/StepsContext.cs @@ -0,0 +1,88 @@ +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using System; +using System.Collections; +using System.Collections.Generic; +using System.Text.RegularExpressions; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker +{ + public sealed class StepsContext + { + private static readonly Regex _propertyRegex = new Regex("^[a-zA-Z_][a-zA-Z0-9_]*$", RegexOptions.Compiled); + private readonly DictionaryContextData _contextData = new DictionaryContextData(); + + public DictionaryContextData GetScope(string scopeName) + { + if (scopeName == null) + { + scopeName = string.Empty; + } + + var scope = default(DictionaryContextData); + if (_contextData.TryGetValue(scopeName, out var scopeValue)) + { + scope = scopeValue.AssertDictionary("scope"); + } + else + { + scope = new DictionaryContextData(); + _contextData.Add(scopeName, scope); + } + + return scope; + } + + public void SetOutput( + string scopeName, + string stepName, + string outputName, + string value, + out string reference) + { + var step = GetStep(scopeName, stepName); + var outputs = step["outputs"].AssertDictionary("outputs"); + outputs[outputName] = new StringContextData(value); + if (_propertyRegex.IsMatch(outputName)) + { + reference = $"steps.{stepName}.outputs.{outputName}"; + } + else + { + reference = $"steps['{stepName}']['outputs']['{outputName}']"; + } + } + + public void SetResult( + string scopeName, + string stepName, + string result) + { + var step = GetStep(scopeName, stepName); + step["result"] = new StringContextData(result); + } + + private DictionaryContextData GetStep(string scopeName, string stepName) + { + var scope = GetScope(scopeName); + var step = default(DictionaryContextData); + if (scope.TryGetValue(stepName, out var stepValue)) + { + step = stepValue.AssertDictionary("step"); + } + else + { + step = new DictionaryContextData + { + { "outputs", new DictionaryContextData() }, + }; + scope.Add(stepName, step); + } + + return step; + } + } +} diff --git a/src/Runner.Worker/StepsRunner.cs b/src/Runner.Worker/StepsRunner.cs new file mode 100644 index 00000000000..c87bb056988 --- /dev/null +++ b/src/Runner.Worker/StepsRunner.cs @@ -0,0 +1,465 @@ +using GitHub.DistributedTask.WebApi; +using Pipelines = GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common.Util; +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using 
GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.Pipelines.ObjectTemplating; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker +{ + public interface IStep + { + string Condition { get; set; } + TemplateToken ContinueOnError { get; } + string DisplayName { get; set; } + IExecutionContext ExecutionContext { get; set; } + TemplateToken Timeout { get; } + Task RunAsync(); + } + + [ServiceLocator(Default = typeof(StepsRunner))] + public interface IStepsRunner : IRunnerService + { + Task RunAsync(IExecutionContext Context); + } + + public sealed class StepsRunner : RunnerService, IStepsRunner + { + // StepsRunner should never throw exception to caller + public async Task RunAsync(IExecutionContext jobContext) + { + ArgUtil.NotNull(jobContext, nameof(jobContext)); + ArgUtil.NotNull(jobContext.JobSteps, nameof(jobContext.JobSteps)); + + // TaskResult: + // Abandoned (Server set this.) + // Canceled + // Failed + // Skipped + // Succeeded + CancellationTokenRegistration? jobCancelRegister = null; + jobContext.JobContext.Status = (jobContext.Result ?? TaskResult.Succeeded).ToActionResult(); + var scopeInputs = new Dictionary(StringComparer.OrdinalIgnoreCase); + bool checkPostJobActions = false; + while (jobContext.JobSteps.Count > 0 || !checkPostJobActions) + { + if (jobContext.JobSteps.Count == 0 && !checkPostJobActions) + { + checkPostJobActions = true; + while (jobContext.PostJobSteps.TryPop(out var postStep)) + { + jobContext.JobSteps.Enqueue(postStep); + } + + continue; + } + + var step = jobContext.JobSteps.Dequeue(); + IStep nextStep = null; + if (jobContext.JobSteps.Count > 0) + { + nextStep = jobContext.JobSteps.Peek(); + } + + Trace.Info($"Processing step: DisplayName='{step.DisplayName}'"); + ArgUtil.NotNull(step.ExecutionContext, nameof(step.ExecutionContext)); + ArgUtil.NotNull(step.ExecutionContext.Variables, nameof(step.ExecutionContext.Variables)); + + // Start + step.ExecutionContext.Start(); + + // Set GITHUB_ACTION + if (step is IActionRunner actionStep) + { + step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name); + } + + // Initialize scope + if (InitializeScope(step, scopeInputs)) + { + var expressionManager = HostContext.GetService(); + try + { + // Register job cancellation call back only if job cancellation token not been fire before each step run + if (!jobContext.CancellationToken.IsCancellationRequested) + { + // Test the condition again. The job was canceled after the condition was originally evaluated. + jobCancelRegister = jobContext.CancellationToken.Register(() => + { + // mark job as cancelled + jobContext.Result = TaskResult.Canceled; + jobContext.JobContext.Status = jobContext.Result?.ToActionResult(); + + step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'."); + ConditionResult conditionReTestResult; + if (HostContext.RunnerShutdownToken.IsCancellationRequested) + { + step.ExecutionContext.Debug($"Skip Re-evaluate condition on runner shutdown."); + conditionReTestResult = false; + } + else + { + try + { + conditionReTestResult = expressionManager.Evaluate(step.ExecutionContext, step.Condition, hostTracingOnly: true); + } + catch (Exception ex) + { + // Cancel the step since we get exception while re-evaluate step condition. 
+ Trace.Info("Caught exception from expression when re-test condition on job cancellation."); + step.ExecutionContext.Error(ex); + conditionReTestResult = false; + } + } + + if (!conditionReTestResult.Value) + { + // Cancel the step. + Trace.Info("Cancel current running step."); + step.ExecutionContext.CancelToken(); + } + }); + } + else + { + if (jobContext.Result != TaskResult.Canceled) + { + // mark job as cancelled + jobContext.Result = TaskResult.Canceled; + jobContext.JobContext.Status = jobContext.Result?.ToActionResult(); + } + } + + // Evaluate condition. + step.ExecutionContext.Debug($"Evaluating condition for step: '{step.DisplayName}'"); + Exception conditionEvaluateError = null; + ConditionResult conditionResult; + if (HostContext.RunnerShutdownToken.IsCancellationRequested) + { + step.ExecutionContext.Debug($"Skip evaluate condition on runner shutdown."); + conditionResult = false; + } + else + { + try + { + conditionResult = expressionManager.Evaluate(step.ExecutionContext, step.Condition); + } + catch (Exception ex) + { + Trace.Info("Caught exception from expression."); + Trace.Error(ex); + conditionResult = false; + conditionEvaluateError = ex; + } + } + + // no evaluate error but condition is false + if (!conditionResult.Value && conditionEvaluateError == null) + { + // Condition == false + Trace.Info("Skipping step due to condition evaluation."); + CompleteStep(step, nextStep, TaskResult.Skipped, resultCode: conditionResult.Trace); + } + else if (conditionEvaluateError != null) + { + // fail the step since there is an evaluate error. + step.ExecutionContext.Error(conditionEvaluateError); + CompleteStep(step, nextStep, TaskResult.Failed); + } + else + { + // Run the step. + await RunStepAsync(step, jobContext.CancellationToken); + CompleteStep(step, nextStep); + } + } + finally + { + if (jobCancelRegister != null) + { + jobCancelRegister?.Dispose(); + jobCancelRegister = null; + } + } + } + + // Update the job result. + if (step.ExecutionContext.Result == TaskResult.Failed) + { + Trace.Info($"Update job result with current step result '{step.ExecutionContext.Result}'."); + jobContext.Result = TaskResultUtil.MergeTaskResults(jobContext.Result, step.ExecutionContext.Result.Value); + jobContext.JobContext.Status = jobContext.Result?.ToActionResult(); + } + else + { + Trace.Info($"No need for updating job result with current step result '{step.ExecutionContext.Result}'."); + } + + Trace.Info($"Current state: job state = '{jobContext.Result}'"); + } + } + + private async Task RunStepAsync(IStep step, CancellationToken jobCancellationToken) + { + // Check to see if we can expand the display name + if (step is IActionRunner actionRunner && + actionRunner.Stage == ActionRunStage.Main && + actionRunner.TryEvaluateDisplayName(step.ExecutionContext.ExpressionValues, step.ExecutionContext)) + { + step.ExecutionContext.UpdateTimelineRecordDisplayName(actionRunner.DisplayName); + } + + // Start the step. 
+ Trace.Info("Starting the step."); + step.ExecutionContext.Debug($"Starting: {step.DisplayName}"); + + // Set the timeout + var timeoutMinutes = 0; + var templateEvaluator = CreateTemplateEvaluator(step.ExecutionContext); + try + { + timeoutMinutes = templateEvaluator.EvaluateStepTimeout(step.Timeout, step.ExecutionContext.ExpressionValues); + } + catch (Exception ex) + { + Trace.Info("An error occurred when attempting to determine the step timeout."); + Trace.Error(ex); + step.ExecutionContext.Error("An error occurred when attempting to determine the step timeout."); + step.ExecutionContext.Error(ex); + } + if (timeoutMinutes > 0) + { + var timeout = TimeSpan.FromMinutes(timeoutMinutes); + step.ExecutionContext.SetTimeout(timeout); + } + +#if OS_WINDOWS + try + { + if (Console.InputEncoding.CodePage != 65001) + { + using (var p = HostContext.CreateService()) + { + // Use UTF8 code page + int exitCode = await p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Work), + fileName: WhichUtil.Which("chcp", true, Trace), + arguments: "65001", + environment: null, + requireExitCodeZero: false, + outputEncoding: null, + killProcessOnCancel: false, + redirectStandardIn: null, + inheritConsoleHandler: true, + cancellationToken: step.ExecutionContext.CancellationToken); + if (exitCode == 0) + { + Trace.Info("Successfully returned to code page 65001 (UTF8)"); + } + else + { + Trace.Warning($"'chcp 65001' failed with exit code {exitCode}"); + } + } + } + } + catch (Exception ex) + { + Trace.Warning($"'chcp 65001' failed with exception {ex.Message}"); + } +#endif + + try + { + await step.RunAsync(); + } + catch (OperationCanceledException ex) + { + if (step.ExecutionContext.CancellationToken.IsCancellationRequested && + !jobCancellationToken.IsCancellationRequested) + { + Trace.Error($"Caught timeout exception from step: {ex.Message}"); + step.ExecutionContext.Error("The action has timed out."); + step.ExecutionContext.Result = TaskResult.Failed; + } + else + { + // Log the exception and cancel the step. + Trace.Error($"Caught cancellation exception from step: {ex}"); + step.ExecutionContext.Error(ex); + step.ExecutionContext.Result = TaskResult.Canceled; + } + } + catch (Exception ex) + { + // Log the error and fail the step. + Trace.Error($"Caught exception from step: {ex}"); + step.ExecutionContext.Error(ex); + step.ExecutionContext.Result = TaskResult.Failed; + } + + // Merge execution context result with command result + if (step.ExecutionContext.CommandResult != null) + { + step.ExecutionContext.Result = TaskResultUtil.MergeTaskResults(step.ExecutionContext.Result, step.ExecutionContext.CommandResult.Value); + } + + // Fixup the step result if ContinueOnError. + if (step.ExecutionContext.Result == TaskResult.Failed) + { + var continueOnError = false; + try + { + continueOnError = templateEvaluator.EvaluateStepContinueOnError(step.ContinueOnError, step.ExecutionContext.ExpressionValues); + } + catch (Exception ex) + { + Trace.Info("The step failed and an error occurred when attempting to determine whether to continue on error."); + Trace.Error(ex); + step.ExecutionContext.Error("The step failed and an error occurred when attempting to determine whether to continue on error."); + step.ExecutionContext.Error(ex); + } + + if (continueOnError) + { + step.ExecutionContext.Result = TaskResult.Succeeded; + Trace.Info($"Updated step result (continue on error)"); + } + } + Trace.Info($"Step result: {step.ExecutionContext.Result}"); + + // Complete the step context. 
+ step.ExecutionContext.Debug($"Finishing: {step.DisplayName}"); + } + + private bool InitializeScope(IStep step, Dictionary scopeInputs) + { + var executionContext = step.ExecutionContext; + var stepsContext = executionContext.StepsContext; + if (!string.IsNullOrEmpty(executionContext.ScopeName)) + { + // Gather uninitialized current and ancestor scopes + var scope = executionContext.Scopes[executionContext.ScopeName]; + var scopesToInitialize = default(Stack); + while (scope != null && !scopeInputs.ContainsKey(scope.Name)) + { + if (scopesToInitialize == null) + { + scopesToInitialize = new Stack(); + } + scopesToInitialize.Push(scope); + scope = string.IsNullOrEmpty(scope.ParentName) ? null : executionContext.Scopes[scope.ParentName]; + } + + // Initialize current and ancestor scopes + while (scopesToInitialize?.Count > 0) + { + scope = scopesToInitialize.Pop(); + executionContext.Debug($"Initializing scope '{scope.Name}'"); + executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.ParentName); + executionContext.ExpressionValues["inputs"] = !String.IsNullOrEmpty(scope.ParentName) ? scopeInputs[scope.ParentName] : null; + var templateEvaluator = CreateTemplateEvaluator(executionContext); + var inputs = default(DictionaryContextData); + try + { + inputs = templateEvaluator.EvaluateStepScopeInputs(scope.Inputs, executionContext.ExpressionValues); + } + catch (Exception ex) + { + Trace.Info($"Caught exception from initialize scope '{scope.Name}'"); + Trace.Error(ex); + executionContext.Error(ex); + executionContext.Complete(TaskResult.Failed); + return false; + } + + scopeInputs[scope.Name] = inputs; + } + } + + // Setup expression values + var scopeName = executionContext.ScopeName; + executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scopeName); + executionContext.ExpressionValues["inputs"] = string.IsNullOrEmpty(scopeName) ? null : scopeInputs[scopeName]; + + return true; + } + + private void CompleteStep(IStep step, IStep nextStep, TaskResult? result = null, string resultCode = null) + { + var executionContext = step.ExecutionContext; + if (!string.IsNullOrEmpty(executionContext.ScopeName)) + { + // Gather current and ancestor scopes to finalize + var scope = executionContext.Scopes[executionContext.ScopeName]; + var scopesToFinalize = default(Queue); + var nextStepScopeName = nextStep?.ExecutionContext.ScopeName; + while (scope != null && + !string.Equals(nextStepScopeName, scope.Name, StringComparison.OrdinalIgnoreCase) && + !(nextStepScopeName ?? string.Empty).StartsWith($"{scope.Name}.", StringComparison.OrdinalIgnoreCase)) + { + if (scopesToFinalize == null) + { + scopesToFinalize = new Queue(); + } + scopesToFinalize.Enqueue(scope); + scope = string.IsNullOrEmpty(scope.ParentName) ? 
null : executionContext.Scopes[scope.ParentName]; + } + + // Finalize current and ancestor scopes + var stepsContext = step.ExecutionContext.StepsContext; + while (scopesToFinalize?.Count > 0) + { + scope = scopesToFinalize.Dequeue(); + executionContext.Debug($"Finalizing scope '{scope.Name}'"); + executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.Name); + executionContext.ExpressionValues["inputs"] = null; + var templateEvaluator = CreateTemplateEvaluator(executionContext); + var outputs = default(DictionaryContextData); + try + { + outputs = templateEvaluator.EvaluateStepScopeOutputs(scope.Outputs, executionContext.ExpressionValues); + } + catch (Exception ex) + { + Trace.Info($"Caught exception from finalize scope '{scope.Name}'"); + Trace.Error(ex); + executionContext.Error(ex); + executionContext.Complete(TaskResult.Failed); + return; + } + + if (outputs?.Count > 0) + { + var parentScopeName = scope.ParentName; + var contextName = scope.ContextName; + foreach (var pair in outputs) + { + var outputName = pair.Key; + var outputValue = pair.Value.ToString(); + stepsContext.SetOutput(parentScopeName, contextName, outputName, outputValue, out var reference); + executionContext.Debug($"{reference}='{outputValue}'"); + } + } + } + } + + executionContext.Complete(result, resultCode: resultCode); + } + + private PipelineTemplateEvaluator CreateTemplateEvaluator(IExecutionContext executionContext) + { + var templateTrace = executionContext.ToTemplateTraceWriter(); + var schema = new PipelineTemplateSchemaFactory().CreateSchema(); + return new PipelineTemplateEvaluator(templateTrace, schema); + } + } +} diff --git a/src/Runner.Worker/TempDirectoryManager.cs b/src/Runner.Worker/TempDirectoryManager.cs new file mode 100644 index 00000000000..a3278824377 --- /dev/null +++ b/src/Runner.Worker/TempDirectoryManager.cs @@ -0,0 +1,62 @@ +using GitHub.Runner.Common.Util; +using System; +using System.IO; +using System.Threading; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(TempDirectoryManager))] + public interface ITempDirectoryManager : IRunnerService + { + void InitializeTempDirectory(IExecutionContext jobContext); + void CleanupTempDirectory(); + } + + public sealed class TempDirectoryManager : RunnerService, ITempDirectoryManager + { + private string _tempDirectory; + + public override void Initialize(IHostContext hostContext) + { + base.Initialize(hostContext); + _tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp); + } + + public void InitializeTempDirectory(IExecutionContext jobContext) + { + ArgUtil.NotNull(jobContext, nameof(jobContext)); + ArgUtil.NotNullOrEmpty(_tempDirectory, nameof(_tempDirectory)); + jobContext.SetRunnerContext("temp", _tempDirectory); + jobContext.Debug($"Cleaning runner temp folder: {_tempDirectory}"); + try + { + IOUtil.DeleteDirectory(_tempDirectory, contentsOnly: true, continueOnContentDeleteError: true, cancellationToken: jobContext.CancellationToken); + } + catch (Exception ex) + { + Trace.Error(ex); + } + finally + { + // make sure folder exists + Directory.CreateDirectory(_tempDirectory); + } + } + + public void CleanupTempDirectory() + { + ArgUtil.NotNullOrEmpty(_tempDirectory, nameof(_tempDirectory)); + Trace.Info($"Cleaning runner temp folder: {_tempDirectory}"); + try + { + IOUtil.DeleteDirectory(_tempDirectory, contentsOnly: true, continueOnContentDeleteError: true, cancellationToken: CancellationToken.None); + } + catch (Exception ex) + { + 
Trace.Error(ex); + } + } + } +} diff --git a/src/Runner.Worker/TrackingConfig.cs b/src/Runner.Worker/TrackingConfig.cs new file mode 100644 index 00000000000..7a99af51eea --- /dev/null +++ b/src/Runner.Worker/TrackingConfig.cs @@ -0,0 +1,119 @@ +using GitHub.DistributedTask.Pipelines; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using Newtonsoft.Json; +using System; +using System.ComponentModel; +using System.Globalization; +using System.IO; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; +using System.Collections.Generic; + +namespace GitHub.Runner.Worker +{ + public sealed class RepositoryTrackingConfig + { + public string RepositoryPath { get; set; } + + [JsonIgnore] + public DateTimeOffset? LastRunOn { get; set; } + + [JsonProperty("LastRunOn")] + [EditorBrowsableAttribute(EditorBrowsableState.Never)] + public string LastRunOnString + { + get + { + return string.Format(CultureInfo.InvariantCulture, "{0}", LastRunOn); + } + + set + { + if (string.IsNullOrEmpty(value)) + { + LastRunOn = null; + return; + } + + LastRunOn = DateTimeOffset.Parse(value, CultureInfo.InvariantCulture); + } + } + } + + public sealed class TrackingConfig + { + // The parameterless constructor is required for deserialization. + public TrackingConfig() + { + } + + public TrackingConfig(IExecutionContext executionContext) + { + var repoFullName = executionContext.GetGitHubContext("repository"); + ArgUtil.NotNullOrEmpty(repoFullName, nameof(repoFullName)); + RepositoryName = repoFullName; + + var repoName = repoFullName.Substring(repoFullName.LastIndexOf('/') + 1); + ArgUtil.NotNullOrEmpty(repoName, nameof(repoName)); + + // Set the directories. + PipelineDirectory = repoName.ToString(CultureInfo.InvariantCulture); + WorkspaceDirectory = Path.Combine(PipelineDirectory, repoName); + + Repositories[repoFullName] = new RepositoryTrackingConfig() + { + LastRunOn = DateTimeOffset.Now, + RepositoryPath = WorkspaceDirectory + }; + + // Set the other properties. + LastRunOn = DateTimeOffset.Now; + } + + private Dictionary _repositories; + + public string RepositoryName { get; set; } + + public string PipelineDirectory { get; set; } + + public string WorkspaceDirectory { get; set; } + + public Dictionary Repositories + { + get + { + if (_repositories == null) + { + _repositories = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return _repositories; + } + } + + [JsonIgnore] + public DateTimeOffset? 
LastRunOn { get; set; } + + [JsonProperty("LastRunOn")] + [EditorBrowsableAttribute(EditorBrowsableState.Never)] + public string LastRunOnString + { + get + { + return string.Format(CultureInfo.InvariantCulture, "{0}", LastRunOn); + } + + set + { + if (string.IsNullOrEmpty(value)) + { + LastRunOn = null; + return; + } + + LastRunOn = DateTimeOffset.Parse(value, CultureInfo.InvariantCulture); + } + } + } +} diff --git a/src/Runner.Worker/TrackingManager.cs b/src/Runner.Worker/TrackingManager.cs new file mode 100644 index 00000000000..b5eb7c8756e --- /dev/null +++ b/src/Runner.Worker/TrackingManager.cs @@ -0,0 +1,74 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using Newtonsoft.Json; +using System; +using System.IO; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Globalization; +using GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(TrackingManager))] + public interface ITrackingManager : IRunnerService + { + TrackingConfig Create(IExecutionContext executionContext, string file); + + TrackingConfig LoadIfExists(IExecutionContext executionContext, string file); + + void Update(IExecutionContext executionContext, TrackingConfig config, string file); + } + + public sealed class TrackingManager : RunnerService, ITrackingManager + { + public TrackingConfig Create( + IExecutionContext executionContext, + string file) + { + Trace.Entering(); + + // Create the new tracking config. + TrackingConfig config = new TrackingConfig(executionContext); + WriteToFile(file, config); + return config; + } + + public TrackingConfig LoadIfExists( + IExecutionContext executionContext, + string file) + { + Trace.Entering(); + + // The tracking config will not exist for a new definition. + if (!File.Exists(file)) + { + return null; + } + + return IOUtil.LoadObject(file); + } + + public void Update( + IExecutionContext executionContext, + TrackingConfig config, + string file) + { + Trace.Entering(); + WriteToFile(file, config); + } + + private void WriteToFile(string file, object value) + { + Trace.Entering(); + Trace.Verbose($"Writing config to file: {file}"); + + // Create the directory if it does not exist. + Directory.CreateDirectory(Path.GetDirectoryName(file)); + IOUtil.SaveObject(value, file); + } + } +} diff --git a/src/Runner.Worker/Variables.cs b/src/Runner.Worker/Variables.cs new file mode 100644 index 00000000000..3110ef5480a --- /dev/null +++ b/src/Runner.Worker/Variables.cs @@ -0,0 +1,221 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Linq; +using GitHub.DistributedTask.Logging; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker +{ + public sealed class Variables + { + private readonly IHostContext _hostContext; + private readonly ConcurrentDictionary _variables = new ConcurrentDictionary(StringComparer.OrdinalIgnoreCase); + private readonly ISecretMasker _secretMasker; + private readonly object _setLock = new object(); + private readonly Tracing _trace; + + public IEnumerable AllVariables + { + get + { + return _variables.Values; + } + } + + public Variables(IHostContext hostContext, IDictionary copy) + { + // Store/Validate args. 
+ _hostContext = hostContext; + _secretMasker = _hostContext.SecretMasker; + _trace = _hostContext.GetTrace(nameof(Variables)); + ArgUtil.NotNull(hostContext, nameof(hostContext)); + + // Validate the dictionary, remove any variable with empty variable name. + ArgUtil.NotNull(copy, nameof(copy)); + if (copy.Keys.Any(k => string.IsNullOrWhiteSpace(k))) + { + _trace.Info($"Remove {copy.Keys.Count(k => string.IsNullOrWhiteSpace(k))} variables with empty variable name."); + } + + // Initialize the variable dictionary. + List variables = new List(); + foreach (var variable in copy) + { + if (!string.IsNullOrWhiteSpace(variable.Key)) + { + variables.Add(new Variable(variable.Key, variable.Value.Value, variable.Value.IsSecret)); + } + } + + foreach (Variable variable in variables) + { + // Store the variable. The initial secret values have already been + // registered by the Worker class. + _variables[variable.Name] = variable; + } + } + + // DO NOT add file path variable to here. + // All file path variables needs to be retrive and set through ExecutionContext, so it can handle container file path translation. + + public string Build_DefinitionName => Get(Constants.Variables.Build.DefinitionName); + + public string Build_Number => Get(Constants.Variables.Build.Number); + +#if OS_WINDOWS + public bool Retain_Default_Encoding => false; +#else + public bool Retain_Default_Encoding => true; +#endif + + public string System_CollectionId => Get(Constants.Variables.System.CollectionId); + + public bool? Step_Debug => GetBoolean(Constants.Variables.Actions.StepDebug); + + public string System_DefinitionId => Get(Constants.Variables.System.DefinitionId); + + public string System_PhaseDisplayName => Get(Constants.Variables.System.PhaseDisplayName); + + public string System_TFCollectionUrl => Get(WellKnownDistributedTaskVariables.TFCollectionUrl); + + public static readonly HashSet PiiVariables = new HashSet(StringComparer.OrdinalIgnoreCase) + { + "Build.AuthorizeAs", + "Build.QueuedBy", + "Build.RequestedFor", + "Build.RequestedForEmail", + "Build.SourceBranch", + "Build.SourceBranchName", + "Build.SourceTfvcShelveset", + "Build.SourceVersion", + "Build.SourceVersionAuthor", + "Job.AuthorizeAs", + "Release.Deployment.RequestedFor", + "Release.Deployment.RequestedForEmail", + "Release.RequestedFor", + "Release.RequestedForEmail", + }; + + public static readonly string PiiArtifactVariablePrefix = "Release.Artifacts"; + + public static readonly List PiiArtifactVariableSuffixes = new List() + { + "SourceBranch", + "SourceBranchName", + "SourceVersion", + "RequestedFor" + }; + + public string Get(string name) + { + Variable variable; + if (_variables.TryGetValue(name, out variable)) + { + _trace.Verbose($"Get '{name}': '{variable.Value}'"); + return variable.Value; + } + + _trace.Verbose($"Get '{name}' (not found)"); + return null; + } + + public bool? GetBoolean(string name) + { + bool val; + if (bool.TryParse(Get(name), out val)) + { + return val; + } + + return null; + } + + public T? GetEnum(string name) where T : struct + { + return EnumUtil.TryParse(Get(name)); + } + + public Guid? GetGuid(string name) + { + Guid val; + if (Guid.TryParse(Get(name), out val)) + { + return val; + } + + return null; + } + + public int? GetInt(string name) + { + int val; + if (int.TryParse(Get(name), out val)) + { + return val; + } + + return null; + } + + public long? 
GetLong(string name) + { + long val; + if (long.TryParse(Get(name), out val)) + { + return val; + } + + return null; + } + + public bool TryGetValue(string name, out string val) + { + Variable variable; + if (_variables.TryGetValue(name, out variable)) + { + val = variable.Value; + _trace.Verbose($"Get '{name}': '{val}'"); + return true; + } + + val = null; + _trace.Verbose($"Get '{name}' (not found)"); + return false; + } + + public DictionaryContextData ToSecretsContext() + { + var result = new DictionaryContextData(); + foreach (var variable in _variables.Values) + { + if (variable.Secret && + !string.Equals(variable.Name, Constants.Variables.System.AccessToken, StringComparison.OrdinalIgnoreCase) && + !string.Equals(variable.Name, "system.github.token", StringComparison.OrdinalIgnoreCase)) + { + result[variable.Name] = new StringContextData(variable.Value); + } + } + return result; + } + } + + public sealed class Variable + { + public string Name { get; private set; } + public bool Secret { get; private set; } + public string Value { get; private set; } + + public Variable(string name, string value, bool secret) + { + ArgUtil.NotNullOrEmpty(name, nameof(name)); + Name = name; + Value = value ?? string.Empty; + Secret = secret; + } + } +} diff --git a/src/Runner.Worker/Worker.cs b/src/Runner.Worker/Worker.cs new file mode 100644 index 00000000000..26d1a895aa6 --- /dev/null +++ b/src/Runner.Worker/Worker.cs @@ -0,0 +1,215 @@ +using GitHub.DistributedTask.WebApi; +using Pipelines = GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Common.Util; +using Newtonsoft.Json; +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.WebApi; +using GitHub.Runner.Common; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker +{ + [ServiceLocator(Default = typeof(Worker))] + public interface IWorker : IRunnerService + { + Task RunAsync(string pipeIn, string pipeOut); + } + + public sealed class Worker : RunnerService, IWorker + { + private readonly TimeSpan _workerStartTimeout = TimeSpan.FromSeconds(30); + private ManualResetEvent _completedCommand = new ManualResetEvent(false); + + // Do not mask the values of these secrets + private static HashSet SecretVariableMaskWhitelist = new HashSet(StringComparer.OrdinalIgnoreCase){ + Constants.Variables.Actions.StepDebug, + Constants.Variables.Actions.RunnerDebug + }; + + public async Task RunAsync(string pipeIn, string pipeOut) + { + try + { + // Setup way to handle SIGTERM/unloading signals + _completedCommand.Reset(); + HostContext.Unloading += Worker_Unloading; + + // Validate args. + ArgUtil.NotNullOrEmpty(pipeIn, nameof(pipeIn)); + ArgUtil.NotNullOrEmpty(pipeOut, nameof(pipeOut)); + var runnerWebProxy = HostContext.GetService(); + var runnerCertManager = HostContext.GetService(); + VssUtil.InitializeVssClientSettings(HostContext.UserAgent, runnerWebProxy.WebProxy, runnerCertManager.VssClientCertificateManager); + var jobRunner = HostContext.CreateService(); + + using (var channel = HostContext.CreateService()) + using (var jobRequestCancellationToken = CancellationTokenSource.CreateLinkedTokenSource(HostContext.RunnerShutdownToken)) + using (var channelTokenSource = new CancellationTokenSource()) + { + // Start the channel. + channel.StartClient(pipeIn, pipeOut); + + // Wait for up to 30 seconds for a message from the channel. 
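+ // The 30 second window is _workerStartTimeout above; the CancellationTokenSource created with
+ // that timeout just below cancels the receive, so the worker does not block forever if the
+ // listener never delivers a job message.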
+ HostContext.WritePerfCounter("WorkerWaitingForJobMessage"); + Trace.Info("Waiting to receive the job message from the channel."); + WorkerMessage channelMessage; + using (var csChannelMessage = new CancellationTokenSource(_workerStartTimeout)) + { + channelMessage = await channel.ReceiveAsync(csChannelMessage.Token); + } + + // Deserialize the job message. + Trace.Info("Message received."); + ArgUtil.Equal(MessageType.NewJobRequest, channelMessage.MessageType, nameof(channelMessage.MessageType)); + ArgUtil.NotNullOrEmpty(channelMessage.Body, nameof(channelMessage.Body)); + var jobMessage = StringUtil.ConvertFromJson(channelMessage.Body); + ArgUtil.NotNull(jobMessage, nameof(jobMessage)); + HostContext.WritePerfCounter($"WorkerJobMessageReceived_{jobMessage.RequestId.ToString()}"); + + // Initialize the secret masker and set the thread culture. + InitializeSecretMasker(jobMessage); + SetCulture(jobMessage); + + // Start the job. + Trace.Info($"Job message:{Environment.NewLine} {StringUtil.ConvertToJson(WorkerUtilities.ScrubPiiData(jobMessage))}"); + Task jobRunnerTask = jobRunner.RunAsync(jobMessage, jobRequestCancellationToken.Token); + + // Start listening for a cancel message from the channel. + Trace.Info("Listening for cancel message from the channel."); + Task channelTask = channel.ReceiveAsync(channelTokenSource.Token); + + // Wait for one of the tasks to complete. + Trace.Info("Waiting for the job to complete or for a cancel message from the channel."); + Task.WaitAny(jobRunnerTask, channelTask); + // Handle if the job completed. + if (jobRunnerTask.IsCompleted) + { + Trace.Info("Job completed."); + channelTokenSource.Cancel(); // Cancel waiting for a message from the channel. + return TaskResultUtil.TranslateToReturnCode(await jobRunnerTask); + } + + // Otherwise a cancel message was received from the channel. + Trace.Info("Cancellation/Shutdown message received."); + channelMessage = await channelTask; + switch (channelMessage.MessageType) + { + case MessageType.CancelRequest: + jobRequestCancellationToken.Cancel(); // Expire the host cancellation token. + break; + case MessageType.RunnerShutdown: + HostContext.ShutdownRunner(ShutdownReason.UserCancelled); + break; + case MessageType.OperatingSystemShutdown: + HostContext.ShutdownRunner(ShutdownReason.OperatingSystemShutdown); + break; + default: + throw new ArgumentOutOfRangeException(nameof(channelMessage.MessageType), channelMessage.MessageType, nameof(channelMessage.MessageType)); + } + + // Await the job. + return TaskResultUtil.TranslateToReturnCode(await jobRunnerTask); + } + } + finally + { + HostContext.Unloading -= Worker_Unloading; + _completedCommand.Set(); + } + } + + private void InitializeSecretMasker(Pipelines.AgentJobRequestMessage message) + { + Trace.Entering(); + ArgUtil.NotNull(message, nameof(message)); + ArgUtil.NotNull(message.Resources, nameof(message.Resources)); + + // Add mask hints for secret variables + foreach (var variable in (message.Variables ?? new Dictionary())) + { + // Need to ignore values on whitelist + if (variable.Value.IsSecret && !SecretVariableMaskWhitelist.Contains(variable.Key)) + { + var value = variable.Value.Value?.Trim() ?? string.Empty; + + // Add the entire value, even if it contains CR or LF. During expression tracing, + // invidual trace info may contain line breaks. + HostContext.SecretMasker.AddValue(value); + + // Also add each individual line. Typically individual lines are processed from STDOUT of child processes. 
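+ // For example, a multi-line secret such as a PEM-encoded private key is registered above as the
+ // full value and below once per trimmed line, so it is masked whether a step echoes the whole
+ // value or a child process prints it line by line.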
+ var split = value.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries); + foreach (var item in split) + { + HostContext.SecretMasker.AddValue(item.Trim()); + } + } + } + + // Add mask hints + foreach (MaskHint maskHint in (message.MaskHints ?? new List())) + { + if (maskHint.Type == MaskType.Regex) + { + HostContext.SecretMasker.AddRegex(maskHint.Value); + + // We need this because the worker will print out the job message JSON to diag log + // and SecretMasker has JsonEscapeEncoder hook up + HostContext.SecretMasker.AddValue(maskHint.Value); + } + else + { + // TODO: Should we fail instead? Do any additional pains need to be taken here? Should the job message not be traced? + Trace.Warning($"Unsupported mask type '{maskHint.Type}'."); + } + } + + // TODO: Avoid adding redundant secrets. If the endpoint auth matches the system connection, then it's added as a value secret and as a regex secret. Once as a value secret b/c of the following code that iterates over each endpoint. Once as a regex secret due to the hint sent down in the job message. + + // Add masks for service endpoints + foreach (ServiceEndpoint endpoint in message.Resources.Endpoints ?? new List()) + { + foreach (string value in endpoint.Authorization?.Parameters?.Values ?? new string[0]) + { + if (!string.IsNullOrEmpty(value)) + { + HostContext.SecretMasker.AddValue(value); + } + } + } + + // Add masks for secure file download tickets + foreach (SecureFile file in message.Resources.SecureFiles ?? new List()) + { + if (!string.IsNullOrEmpty(file.Ticket)) + { + HostContext.SecretMasker.AddValue(file.Ticket); + } + } + } + + private void SetCulture(Pipelines.AgentJobRequestMessage message) + { + // Extract the culture name from the job's variable dictionary. + VariableValue culture; + ArgUtil.NotNull(message, nameof(message)); + ArgUtil.NotNull(message.Variables, nameof(message.Variables)); + if (message.Variables.TryGetValue(Constants.Variables.System.Culture, out culture)) + { + // Set the default thread culture. 
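+ // The value is a culture name such as "en-US"; per the comment above it becomes the default
+ // thread culture for the remainder of the job.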
+ HostContext.SetDefaultCulture(culture.Value); + } + } + + private void Worker_Unloading(object sender, EventArgs e) + { + if (!HostContext.RunnerShutdownToken.IsCancellationRequested) + { + HostContext.ShutdownRunner(ShutdownReason.UserCancelled); + _completedCommand.WaitOne(Constants.Runner.ExitOnUnloadTimeout); + } + } + } +} diff --git a/src/Runner.Worker/WorkerUtilties.cs b/src/Runner.Worker/WorkerUtilties.cs new file mode 100644 index 00000000000..ac147046943 --- /dev/null +++ b/src/Runner.Worker/WorkerUtilties.cs @@ -0,0 +1,92 @@ +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.WebApi; +using Pipelines = GitHub.DistributedTask.Pipelines; +using System; +using System.Collections.Generic; +using System.Linq; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Worker +{ + public class WorkerUtilities + { + public static Pipelines.AgentJobRequestMessage ScrubPiiData(Pipelines.AgentJobRequestMessage message) + { + ArgUtil.NotNull(message, nameof(message)); + + var scrubbedVariables = new Dictionary(); + + // Scrub the known PII variables + foreach (var variable in message.Variables) + { + if (Variables.PiiVariables.Contains(variable.Key) || + (variable.Key.StartsWith(Variables.PiiArtifactVariablePrefix, StringComparison.OrdinalIgnoreCase) + && Variables.PiiArtifactVariableSuffixes.Any(varSuffix => variable.Key.EndsWith(varSuffix, StringComparison.OrdinalIgnoreCase)))) + { + scrubbedVariables[variable.Key] = "[PII]"; + } + else + { + scrubbedVariables[variable.Key] = variable.Value; + } + } + + var scrubbedRepositories = new List(); + + // Scrub the repository resources + foreach (var repository in message.Resources.Repositories) + { + Pipelines.RepositoryResource scrubbedRepository = repository.Clone(); + + var versionInfo = repository.Properties.Get(Pipelines.RepositoryPropertyNames.VersionInfo); + + if (versionInfo != null) + { + scrubbedRepository.Properties.Set( + Pipelines.RepositoryPropertyNames.VersionInfo, + new Pipelines.VersionInfo() + { + Author = "[PII]", + Message = versionInfo.Message + }); + } + + scrubbedRepositories.Add(scrubbedRepository); + } + + var scrubbedJobResources = new Pipelines.JobResources(); + + scrubbedJobResources.Containers.AddRange(message.Resources.Containers); + scrubbedJobResources.Endpoints.AddRange(message.Resources.Endpoints); + scrubbedJobResources.Repositories.AddRange(scrubbedRepositories); + scrubbedJobResources.SecureFiles.AddRange(message.Resources.SecureFiles); + + var contextData = new DictionaryContextData(); + if (message.ContextData?.Count > 0) + { + foreach (var pair in message.ContextData) + { + contextData[pair.Key] = pair.Value; + } + } + + // Reconstitute a new agent job request message from the scrubbed parts + return new Pipelines.AgentJobRequestMessage( + plan: message.Plan, + timeline: message.Timeline, + jobId: message.JobId, + jobDisplayName: message.JobDisplayName, + jobName: message.JobName, + jobContainer: message.JobContainer, + jobServiceContainers: message.JobServiceContainers, + environmentVariables: message.EnvironmentVariables, + variables: scrubbedVariables, + maskHints: message.MaskHints, + jobResources: scrubbedJobResources, + contextData: contextData, + workspaceOptions: message.Workspace, + steps: message.Steps, + scopes: message.Scopes); + } + } +} diff --git a/src/Runner.Worker/action_yaml.json b/src/Runner.Worker/action_yaml.json new file mode 100644 index 00000000000..a30de160674 --- /dev/null +++ b/src/Runner.Worker/action_yaml.json @@ -0,0 +1,106 @@ +{ + "definitions": { 
+ "action-root": { + "description": "Action file", + "mapping": { + "properties": { + "name": "string", + "description": "string", + "inputs": "inputs", + "runs": "runs" + }, + "loose-key-type": "non-empty-string", + "loose-value-type": "any" + } + }, + "inputs": { + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "input" + } + }, + "input": { + "mapping": { + "properties": { + "default": "input-default-context" + }, + "loose-key-type": "non-empty-string", + "loose-value-type": "any" + } + }, + "runs": { + "one-of": [ + "container-runs", + "node12-runs", + "plugin-runs" + ] + }, + "container-runs": { + "mapping": { + "properties": { + "using": "non-empty-string", + "image": "non-empty-string", + "entrypoint": "non-empty-string", + "args": "container-runs-args", + "env": "container-runs-env", + "post-entrypoint": "non-empty-string", + "post-if": "non-empty-string" + } + } + }, + "container-runs-args": { + "sequence": { + "item-type": "container-runs-context" + } + }, + "container-runs-env": { + "context": [ + "inputs" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + "node12-runs": { + "mapping": { + "properties": { + "using": "non-empty-string", + "main": "non-empty-string", + "post": "non-empty-string", + "post-if": "non-empty-string" + } + } + }, + "plugin-runs": { + "mapping": { + "properties": { + "plugin": "non-empty-string" + } + } + }, + "container-runs-context": { + "context": [ + "inputs" + ], + "string": {} + }, + "input-default-context": { + "context": [ + "github", + "strategy", + "matrix", + "steps", + "job", + "runner", + "env" + ], + "string": {} + }, + "non-empty-string": { + "string": { + "require-non-empty": true + } + } + } +} \ No newline at end of file diff --git a/src/Sdk/AadAuthentication/CookieUtility.cs b/src/Sdk/AadAuthentication/CookieUtility.cs new file mode 100644 index 00000000000..ea997564688 --- /dev/null +++ b/src/Sdk/AadAuthentication/CookieUtility.cs @@ -0,0 +1,264 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Runtime.InteropServices; +using System.Text; +using GitHub.Services.Common; + +namespace GitHub.Services.Client +{ + internal static class CookieUtility + { + public static readonly String AcsMetadataRetrievalExceptionText = "Unable to retrieve ACS Metadata from '{0}'"; + public static readonly String FedAuthCookieName = "FedAuth"; + public static readonly String WindowsLiveSignOutUrl = "https://login.live.com/uilogout.srf"; + public static readonly Uri WindowsLiveCookieDomain = new Uri("https://login.live.com/"); + + public static CookieCollection GetFederatedCookies(Uri cookieDomainAndPath) + { + CookieCollection result = null; + + Cookie cookie = GetCookieEx(cookieDomainAndPath, FedAuthCookieName).FirstOrDefault(); + + if (cookie != null) + { + result = new CookieCollection(); + result.Add(cookie); + + for (Int32 x = 1; x < 50; x++) + { + String cookieName = FedAuthCookieName + x; + cookie = GetCookieEx(cookieDomainAndPath, cookieName).FirstOrDefault(); + + if (cookie != null) + { + result.Add(cookie); + } + else + { + break; + } + } + } + + return result; + } + + public static CookieCollection GetFederatedCookies(String[] token) + { + CookieCollection result = null; + + if (token != null && token.Length > 0 && token[0] != null) + { + result = new CookieCollection(); + result.Add(new Cookie(FedAuthCookieName, token[0])); + + for (Int32 x = 1; x < 
token.Length; x++) + { + String cookieName = FedAuthCookieName + x; + + if (token[x] != null) + { + Cookie cookie = new Cookie(cookieName, token[x]); + cookie.HttpOnly = true; + result.Add(cookie); + } + else + { + break; + } + } + } + + return result; + } + + public static CookieCollection GetFederatedCookies(IHttpResponse webResponse) + { + CookieCollection result = null; + IEnumerable cookies = null; + + if (webResponse.Headers.TryGetValues("Set-Cookie", out cookies)) + { + foreach (String cookie in cookies) + { + if (cookie != null && cookie.StartsWith(CookieUtility.FedAuthCookieName, StringComparison.OrdinalIgnoreCase)) + { + // Only take the security token field of the cookie, and discard the rest + String fedAuthToken = cookie.Split(';').FirstOrDefault(); + Int32 index = fedAuthToken.IndexOf('='); + + if (index > 0 && index < fedAuthToken.Length - 1) + { + String name = fedAuthToken.Substring(0, index); + String value = fedAuthToken.Substring(index + 1); + + result = result ?? new CookieCollection(); + result.Add(new Cookie(name, value)); + } + } + } + } + + return result; + } + + public static CookieCollection GetAllCookies(Uri cookieDomainAndPath) + { + CookieCollection result = null; + List cookies = GetCookieEx(cookieDomainAndPath, null); + foreach (Cookie cookie in cookies) + { + if (result == null) + { + result = new CookieCollection(); + } + + result.Add(cookie); + } + + return result; + } + + public static void DeleteFederatedCookies(Uri cookieDomainAndPath) + { + CookieCollection cookies = GetFederatedCookies(cookieDomainAndPath); + + if (cookies != null) + { + foreach (Cookie cookie in cookies) + { + DeleteCookieEx(cookieDomainAndPath, cookie.Name); + } + } + } + + public static void DeleteWindowsLiveCookies() + { + DeleteAllCookies(WindowsLiveCookieDomain); + } + + public static void DeleteAllCookies(Uri cookieDomainAndPath) + { + CookieCollection cookies = GetAllCookies(cookieDomainAndPath); + + if (cookies != null) + { + foreach (Cookie cookie in cookies) + { + DeleteCookieEx(cookieDomainAndPath, cookie.Name); + } + } + } + + public const UInt32 INTERNET_COOKIE_HTTPONLY = 0x00002000; + + [DllImport("wininet.dll", SetLastError = true, CharSet = CharSet.Unicode)] + static extern bool InternetGetCookieEx( + String url, String cookieName, StringBuilder cookieData, ref Int32 size, UInt32 flags, IntPtr reserved); + + [DllImport("wininet.dll", SetLastError = true, CharSet = CharSet.Unicode)] + static extern bool InternetSetCookieEx( + String url, String cookieName, String cookieData, UInt32 flags, IntPtr reserved); + + public static Boolean DeleteCookieEx(Uri cookiePath, String cookieName) + { + UInt32 flags = INTERNET_COOKIE_HTTPONLY; + + String path = cookiePath.ToString(); + if (!path.EndsWith("/", StringComparison.Ordinal)) + { + path = path + "/"; + } + + DateTime expiration = DateTime.UtcNow.AddYears(-1); + String cookieData = String.Format(CultureInfo.InvariantCulture, "{0}=0;expires={1};path=/;domain={2};httponly", cookieName, expiration.ToString("R"), cookiePath.Host); + + return InternetSetCookieEx(path, null, cookieData, flags, IntPtr.Zero); + } + + public static Boolean SetCookiesEx( + Uri cookiePath, + CookieCollection cookies) + { + String path = cookiePath.ToString(); + if (!path.EndsWith("/", StringComparison.Ordinal)) + { + path = path + "/"; + } + + Boolean successful = true; + foreach (Cookie cookie in cookies) + { + // This means it doesn't expire + if (cookie.Expires.Year == 1) + { + continue; + } + + String cookieData = 
String.Format(CultureInfo.InvariantCulture, + "{0}; path={1}; domain={2}; expires={3}; httponly", + cookie.Value, + cookie.Path, + cookie.Domain, + cookie.Expires.ToString("ddd, dd-MMM-yyyy HH:mm:ss 'GMT'")); + + successful &= InternetSetCookieEx(path, cookie.Name, cookieData, INTERNET_COOKIE_HTTPONLY, IntPtr.Zero); + } + return successful; + } + + public static List GetCookieEx(Uri cookiePath, String cookieName) + { + UInt32 flags = INTERNET_COOKIE_HTTPONLY; + + List cookies = new List(); + Int32 size = 256; + StringBuilder cookieData = new StringBuilder(size); + String path = cookiePath.ToString(); + if (!path.EndsWith("/", StringComparison.Ordinal)) + { + path = path + "/"; + } + + if (!InternetGetCookieEx(path, cookieName, cookieData, ref size, flags, IntPtr.Zero)) + { + if (size < 0) + { + return cookies; + } + + cookieData = new StringBuilder(size); + + if (!InternetGetCookieEx(path, cookieName, cookieData, ref size, flags, IntPtr.Zero)) + { + return cookies; + } + } + + if (cookieData.Length > 0) + { + String[] cookieSections = cookieData.ToString().Split(new char[] { ';' }); + + foreach (String cookieSection in cookieSections) + { + String[] cookieParts = cookieSection.Split(new char[] { '=' }, 2); + + if (cookieParts.Length == 2) + { + Cookie cookie = new Cookie(); + cookie.Name = cookieParts[0].TrimStart(); + cookie.Value = cookieParts[1]; + cookie.HttpOnly = true; + cookies.Add(cookie); + } + } + } + + return cookies; + } + } +} diff --git a/src/Sdk/AadAuthentication/VssAadCredential.cs b/src/Sdk/AadAuthentication/VssAadCredential.cs new file mode 100644 index 00000000000..92f365d5fe3 --- /dev/null +++ b/src/Sdk/AadAuthentication/VssAadCredential.cs @@ -0,0 +1,95 @@ +using System; +using System.Net.Http; +using System.Security; +using GitHub.Services.Common; + +namespace GitHub.Services.Client +{ + /// + /// Currently it is impossible to get whether prompting is allowed from the credential itself without reproducing the logic + /// used by VssClientCredentials. Since this is a stop gap solution to get Windows integrated authentication to work against + /// AAD via ADFS for now this class will only support that one, non-interactive flow. We need to assess how much we want to + /// invest in this legacy stack rather than recommending people move to the VssConnect API for future authentication needs. 
+ /// + [Serializable] + public sealed class VssAadCredential : FederatedCredential + { + private string username; + private SecureString password; + + public VssAadCredential() + : base(null) + { + } + + public VssAadCredential(VssAadToken initialToken) + : base(initialToken) + { + } + + public VssAadCredential(string username) + : base(null) + { + this.username = username; + } + + public VssAadCredential(string username, string password) + : base(null) + { + this.username = username; + + if (password != null) + { + this.password = new SecureString(); + + foreach (char character in password) + { + this.password.AppendChar(character); + } + } + } + + public VssAadCredential(string username, SecureString password) + : base(null) + { + this.username = username; + this.password = password; + } + + public override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.Aad; + } + } + + internal string Username + { + get + { + return username; + } + } + + internal SecureString Password => password; + + public override bool IsAuthenticationChallenge(IHttpResponse webResponse) + { + bool isNonAuthenticationChallenge = false; + return VssFederatedCredential.IsVssFederatedAuthenticationChallenge(webResponse, out isNonAuthenticationChallenge) ?? false; + } + + protected override IssuedTokenProvider OnCreateTokenProvider( + Uri serverUrl, + IHttpResponse response) + { + if (response == null && base.InitialToken == null) + { + return null; + } + + return new VssAadTokenProvider(this); + } + } +} diff --git a/src/Sdk/AadAuthentication/VssAadSettings.cs b/src/Sdk/AadAuthentication/VssAadSettings.cs new file mode 100644 index 00000000000..651d642c2db --- /dev/null +++ b/src/Sdk/AadAuthentication/VssAadSettings.cs @@ -0,0 +1,89 @@ +using System; +using System.Diagnostics; +using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.Services.Client +{ + internal static class VssAadSettings + { + public const string DefaultAadInstance = "https://login.microsoftonline.com/"; + + public const string CommonTenant = "common"; + + // VSTS service principal. + public const string Resource = "499b84ac-1321-427f-aa17-267ca6975798"; + + // Visual Studio IDE client ID originally provisioned by Azure Tools. + public const string Client = "872cd9fa-d31f-45e0-9eab-6e460a02d1f1"; + + // AAD Production Application tenant. + private const string ApplicationTenantId = "f8cdef31-a31e-4b4a-93e4-5f571e91255a"; + +#if !NETSTANDARD + public static Uri NativeClientRedirectUri + { + get + { + Uri nativeClientRedirect = null; + + try + { + string nativeRedirect = VssClientEnvironment.GetSharedConnectedUserValue(VssConnectionParameterOverrideKeys.AadNativeClientRedirect); + if (!string.IsNullOrEmpty(nativeRedirect)) + { + Uri.TryCreate(nativeRedirect, UriKind.RelativeOrAbsolute, out nativeClientRedirect); + } + } + catch (Exception e) + { + Debug.WriteLine(string.Format("NativeClientRedirectUri: {0}", e)); + } + + return nativeClientRedirect ?? new Uri("urn:ietf:wg:oauth:2.0:oob"); + } + } + + public static string ClientId + { + get + { + string nativeRedirect = VssClientEnvironment.GetSharedConnectedUserValue(VssConnectionParameterOverrideKeys.AadNativeClientIdentifier); + return nativeRedirect ?? 
VssAadSettings.Client; + } + } +#endif + + public static string AadInstance + { + get + { +#if !NETSTANDARD + string aadInstance = VssClientEnvironment.GetSharedConnectedUserValue(VssConnectionParameterOverrideKeys.AadInstance); +#else + string aadInstance = null; +#endif + + if (string.IsNullOrWhiteSpace(aadInstance)) + { + aadInstance = DefaultAadInstance; + } + else if (!aadInstance.EndsWith("/")) + { + aadInstance = aadInstance + "/"; + } + + return aadInstance; + } + } + +#if !NETSTANDARD + /// + /// Application tenant either from a registry override or a constant + /// + public static string ApplicationTenant => + VssClientEnvironment.GetSharedConnectedUserValue(VssConnectionParameterOverrideKeys.AadApplicationTenant) + ?? VssAadSettings.ApplicationTenantId; +#endif + } +} diff --git a/src/Sdk/AadAuthentication/VssAadToken.cs b/src/Sdk/AadAuthentication/VssAadToken.cs new file mode 100644 index 00000000000..debfad14720 --- /dev/null +++ b/src/Sdk/AadAuthentication/VssAadToken.cs @@ -0,0 +1,124 @@ +using System; +using Microsoft.IdentityModel.Clients.ActiveDirectory; +using GitHub.Services.Common; + +namespace GitHub.Services.Client +{ + [Serializable] + public class VssAadToken : IssuedToken + { + private string accessToken; + private string accessTokenType; + + private AuthenticationContext authenticationContext; + private UserCredential userCredential; + private VssAadTokenOptions options; + + public VssAadToken(AuthenticationResult authentication) + { + // Prevent any attempt to store this token. + this.FromStorage = true; + + if (!string.IsNullOrWhiteSpace(authentication.AccessToken)) + { + this.Authenticated(); + } + + this.accessToken = authentication.AccessToken; + this.accessTokenType = authentication.AccessTokenType; + } + + public VssAadToken( + string accessTokenType, + string accessToken) + { + // Prevent any attempt to store this token. + this.FromStorage = true; + + if (!string.IsNullOrWhiteSpace(accessToken) && !string.IsNullOrWhiteSpace(accessTokenType)) + { + this.Authenticated(); + } + + this.accessToken = accessToken; + this.accessTokenType = accessTokenType; + } + + public VssAadToken( + AuthenticationContext authenticationContext, + UserCredential userCredential = null, + VssAadTokenOptions options = VssAadTokenOptions.None) + { + // Prevent any attempt to store this token. 
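+ // As in the other constructors, FromStorage = true marks the token as non-storable; with this
+ // overload no access token exists yet, so it is acquired lazily by AcquireToken() below.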
+ this.FromStorage = true; + + this.authenticationContext = authenticationContext; + this.userCredential = userCredential; + this.options = options; + } + + protected internal override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.Aad; + } + } + + public AuthenticationResult AcquireToken() + { + if (this.authenticationContext == null) + { + return null; + } + + AuthenticationResult authenticationResult = null; + + for (int index = 0; index < 3; index++) + { + try + { + if (this.userCredential == null && !options.HasFlag(VssAadTokenOptions.AllowDialog)) + { + authenticationResult = authenticationContext.AcquireTokenSilentAsync(VssAadSettings.Resource, VssAadSettings.Client).ConfigureAwait(false).GetAwaiter().GetResult(); + } + else + { + authenticationResult = authenticationContext.AcquireTokenAsync(VssAadSettings.Resource, VssAadSettings.Client, this.userCredential).ConfigureAwait(false).GetAwaiter().GetResult(); + } + + if (authenticationResult != null) + { + break; + } + } + catch (Exception x) + { + System.Diagnostics.Debug.WriteLine("Failed to get ADFS token: " + x.ToString()); + } + } + + return authenticationResult; + } + + internal override void ApplyTo(IHttpRequest request) + { + AuthenticationResult authenticationResult = AcquireToken(); + if (authenticationResult != null) + { + request.Headers.SetValue(Common.Internal.HttpHeaders.Authorization, $"{authenticationResult.AccessTokenType} {authenticationResult.AccessToken}"); + } + else if (!string.IsNullOrEmpty(this.accessTokenType) && !string.IsNullOrEmpty(this.accessToken)) + { + request.Headers.SetValue(Common.Internal.HttpHeaders.Authorization, $"{this.accessTokenType} {this.accessToken}"); + } + } + } + + [Flags] + public enum VssAadTokenOptions + { + None = 0, + AllowDialog = 1 + } +} diff --git a/src/Sdk/AadAuthentication/VssAadTokenProvider.cs b/src/Sdk/AadAuthentication/VssAadTokenProvider.cs new file mode 100644 index 00000000000..a10dbf1b5b3 --- /dev/null +++ b/src/Sdk/AadAuthentication/VssAadTokenProvider.cs @@ -0,0 +1,77 @@ +using System.Threading; +using System.Threading.Tasks; +using Microsoft.IdentityModel.Clients.ActiveDirectory; +using GitHub.Services.Common; + +namespace GitHub.Services.Client +{ + internal sealed class VssAadTokenProvider : IssuedTokenProvider + { + public VssAadTokenProvider(VssAadCredential credential) + : base(credential, null, null) + { + } + + public override bool GetTokenIsInteractive + { + get + { + return false; + } + } + + private VssAadToken GetVssAadToken() + { + AuthenticationContext authenticationContext = new AuthenticationContext(string.Concat(VssAadSettings.AadInstance, VssAadSettings.CommonTenant)); + UserCredential userCredential = null; + + VssAadCredential credential = this.Credential as VssAadCredential; + + if (credential?.Username != null) + { +#if NETSTANDARD + // UserPasswordCredential does not currently exist for ADAL 3.13.5 for any non-desktop build. + userCredential = new UserCredential(credential.Username); +#else + if (credential.Password != null) + { + userCredential = new UserPasswordCredential(credential.Username, credential.Password); + + } + else + { + userCredential = new UserCredential(credential.Username); + } +#endif + } + else + { + userCredential = new UserCredential(); + } + + return new VssAadToken(authenticationContext, userCredential); + } + + /// + /// Temporary implementation since we don't have a good configuration story here at the moment. 
+ /// + protected override Task OnGetTokenAsync(IssuedToken failedToken, CancellationToken cancellationToken) + { + // If we have already tried to authenticate with an AAD token retrieved from Windows integrated authentication and it is not working, clear out state. + if (failedToken != null && failedToken.CredentialType == VssCredentialsType.Aad && failedToken.IsAuthenticated) + { + this.CurrentToken = null; + return Task.FromResult(null); + } + + try + { + return Task.FromResult(GetVssAadToken()); + } + catch + { } + + return Task.FromResult(null); + } + } +} diff --git a/src/Sdk/AadAuthentication/VssFederatedCredential.cs b/src/Sdk/AadAuthentication/VssFederatedCredential.cs new file mode 100644 index 00000000000..dcc5bda9792 --- /dev/null +++ b/src/Sdk/AadAuthentication/VssFederatedCredential.cs @@ -0,0 +1,172 @@ +using System; +using System.Linq; +using System.Net; +using GitHub.Services.Common; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.Client +{ + /// + /// Provides federated authentication with a hosted VssConnection instance using cookies. + /// + [Serializable] + public sealed class VssFederatedCredential : FederatedCredential + { + /// + /// Initializes a new VssFederatedCredential instance. + /// + public VssFederatedCredential() + : this(true) + { + } + + /// + /// Initializes a new VssFederatedCredential instance. + /// + public VssFederatedCredential(Boolean useCache) + : this(useCache, null) + { + } + + /// + /// Initializes a new VssFederatedCredential instance. + /// + /// The initial token if available + public VssFederatedCredential(VssFederatedToken initialToken) + : this(false, initialToken) + { + } + + public VssFederatedCredential( + Boolean useCache, + VssFederatedToken initialToken) + : base(initialToken) + { +#if !NETSTANDARD + if (useCache) + { + Storage = new VssClientCredentialStorage(); + } +#endif + } + + /// + /// + /// + public override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.Federated; + } + } + + public override Boolean IsAuthenticationChallenge(IHttpResponse webResponse) + { + bool isNonAuthenticationChallenge = false; + return IsVssFederatedAuthenticationChallenge(webResponse, out isNonAuthenticationChallenge) ?? isNonAuthenticationChallenge; + } + + protected override IssuedTokenProvider OnCreateTokenProvider( + Uri serverUrl, + IHttpResponse response) + { + // The response is only null when attempting to determine the most appropriate token provider to + // use for the connection. The only way we should do anything here is if we have an initial token + // since that means we can present something without making a server call. 
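+ // In other words, with neither a challenge response nor an initial token there is nothing to
+ // present and no provider is created; otherwise the sign-in URL is taken from the Location or
+ // TfsFedAuthRedirect header below and decorated with the javascriptnotify/force parameters.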
+ if (response == null && base.InitialToken == null) + { + return null; + } + + Uri signInUrl = null; + String realm = String.Empty; + String issuer = String.Empty; + + if (response != null) + { + var location = response.Headers.GetValues(HttpHeaders.Location).FirstOrDefault(); + if (location == null) + { + location = response.Headers.GetValues(HttpHeaders.TfsFedAuthRedirect).FirstOrDefault(); + } + + if (!String.IsNullOrEmpty(location)) + { + signInUrl = new Uri(location); + } + + // Inform the server that we support the javascript notify "smart client" pattern for ACS auth + AddParameter(ref signInUrl, "protocol", "javascriptnotify"); + + // Do not automatically sign in with existing FedAuth cookie + AddParameter(ref signInUrl, "force", "1"); + + GetRealmAndIssuer(response, out realm, out issuer); + } + + return new VssFederatedTokenProvider(this, serverUrl, signInUrl, issuer, realm); + } + + internal static void GetRealmAndIssuer( + IHttpResponse response, + out String realm, + out String issuer) + { + realm = response.Headers.GetValues(HttpHeaders.TfsFedAuthRealm).FirstOrDefault(); + issuer = response.Headers.GetValues(HttpHeaders.TfsFedAuthIssuer).FirstOrDefault(); + + if (!String.IsNullOrWhiteSpace(issuer)) + { + issuer = new Uri(issuer).GetLeftPart(UriPartial.Authority); + } + } + + internal static Boolean? IsVssFederatedAuthenticationChallenge( + IHttpResponse webResponse, + out Boolean isNonAuthenticationChallenge) + { + isNonAuthenticationChallenge = false; + + if (webResponse == null) + { + return false; + } + + // Check to make sure that the redirect was issued from the Tfs service. We include the TfsServiceError + // header to avoid the possibility that a redirect from a non-tfs service is issued and we incorrectly + // launch the credentials UI. + if (webResponse.StatusCode == HttpStatusCode.Found || + webResponse.StatusCode == HttpStatusCode.Redirect) + { + return webResponse.Headers.GetValues(HttpHeaders.Location).Any() && webResponse.Headers.GetValues(HttpHeaders.TfsFedAuthRealm).Any(); + } + else if (webResponse.StatusCode == HttpStatusCode.Unauthorized) + { + return webResponse.Headers.GetValues(HttpHeaders.WwwAuthenticate).Any(x => x.StartsWith("TFS-Federated", StringComparison.OrdinalIgnoreCase)); + } + else if (webResponse.StatusCode == HttpStatusCode.Forbidden) + { + // This is not strictly an "authentication challenge" but it is a state the user can do something about so they can get access to the resource + // they are attempting to access. Specifically, the user will hit this when they need to update or create a profile required by business policy. 
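+ // Returning null here (rather than true or false) lets each caller decide: VssAadCredential
+ // coalesces null to false, while VssFederatedCredential coalesces it to the
+ // isNonAuthenticationChallenge flag set just below.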
+ isNonAuthenticationChallenge = webResponse.Headers.GetValues(HttpHeaders.TfsFedAuthRedirect).Any(); + if (isNonAuthenticationChallenge) + { + return null; + } + } + + return false; + } + + private static void AddParameter(ref Uri uri, String name, String value) + { + if (uri.Query.IndexOf(String.Concat(name, "="), StringComparison.OrdinalIgnoreCase) < 0) + { + UriBuilder builder = new UriBuilder(uri); + builder.Query = String.Concat(builder.Query.TrimStart('?'), "&", name, "=", value); + uri = builder.Uri; + } + } + } +} diff --git a/src/Sdk/AadAuthentication/VssFederatedToken.cs b/src/Sdk/AadAuthentication/VssFederatedToken.cs new file mode 100644 index 00000000000..3e3cbed9d89 --- /dev/null +++ b/src/Sdk/AadAuthentication/VssFederatedToken.cs @@ -0,0 +1,84 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Net; +using GitHub.Services.Common; + +namespace GitHub.Services.Client +{ + /// + /// Provides a cookie-based authentication token. + /// + [Serializable] + public sealed class VssFederatedToken : IssuedToken + { + /// + /// Initializes a new VssFederatedToken instance using the specified cookies. + /// + /// + public VssFederatedToken(CookieCollection cookies) + { + ArgumentUtility.CheckForNull(cookies, "cookies"); + m_cookies = cookies; + } + + /// + /// Returns the CookieCollection contained within this token. For internal use only. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public CookieCollection CookieCollection + { + get + { + return m_cookies; + } + } + + protected internal override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.Federated; + } + } + + internal override void ApplyTo(IHttpRequest request) + { + // From http://www.ietf.org/rfc/rfc2109.txt: + // Note: For backward compatibility, the separator in the Cookie header + // is semi-colon (;) everywhere. + // + // HttpRequestHeaders uses comma as the default separator, so instead of returning + // a list of cookies, the method returns one semicolon separated string. + IEnumerable values = request.Headers.GetValues(s_cookieHeader); + request.Headers.SetValue(s_cookieHeader, GetHeaderValue(values)); + } + + private String GetHeaderValue(IEnumerable cookieHeaders) + { + List currentCookies = new List(); + if (cookieHeaders != null) + { + foreach (String value in cookieHeaders) + { + currentCookies.AddRange(value.Split(';').Select(x => x.Trim())); + } + } + + currentCookies.RemoveAll(x => String.IsNullOrEmpty(x)); + + foreach (Cookie cookie in m_cookies) + { + // Remove all existing cookies that match the name of the cookie we are going to add. 
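+ // For example, an existing header of "FedAuth=old; Other=1" combined with a token cookie
+ // FedAuth=new produces "Other=1; FedAuth=new" after the replacement below.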
+ currentCookies.RemoveAll(x => String.Equals(x.Substring(0, x.IndexOf('=')), cookie.Name, StringComparison.OrdinalIgnoreCase)); + currentCookies.Add(String.Concat(cookie.Name, "=", cookie.Value)); + } + + return String.Join("; ", currentCookies); + } + + private CookieCollection m_cookies; + private static readonly String s_cookieHeader = HttpRequestHeader.Cookie.ToString(); + } +} diff --git a/src/Sdk/AadAuthentication/VssFederatedTokenProvider.cs b/src/Sdk/AadAuthentication/VssFederatedTokenProvider.cs new file mode 100644 index 00000000000..e43f3de4c34 --- /dev/null +++ b/src/Sdk/AadAuthentication/VssFederatedTokenProvider.cs @@ -0,0 +1,157 @@ +using System; +using System.Net; +using System.Net.Http; +using GitHub.Services.Common; +using System.Globalization; + +namespace GitHub.Services.Client +{ + /// + /// Provides authentication for internet identities using single-sign-on cookies. + /// + internal sealed class VssFederatedTokenProvider : IssuedTokenProvider, ISupportSignOut + { + public VssFederatedTokenProvider( + VssFederatedCredential credential, + Uri serverUrl, + Uri signInUrl, + String issuer, + String realm) + : base(credential, serverUrl, signInUrl) + { + Issuer = issuer; + Realm = realm; + } + + protected override String AuthenticationScheme + { + get + { + return "TFS-Federated"; + } + } + + protected override String AuthenticationParameter + { + get + { + if (String.IsNullOrEmpty(this.Issuer) && String.IsNullOrEmpty(this.Realm)) + { + return String.Empty; + } + else + { + return String.Format(CultureInfo.InvariantCulture, "issuer=\"{0}\", realm=\"{1}\"", this.Issuer, this.Realm); + } + } + } + + /// + /// Gets the federated credential from which this provider was created. + /// + public new VssFederatedCredential Credential + { + get + { + return (VssFederatedCredential)base.Credential; + } + } + + /// + /// Gets a value indicating whether or not a call to get token will require interactivity. + /// + public override Boolean GetTokenIsInteractive + { + get + { + return this.CurrentToken == null; + } + } + + /// + /// Gets the issuer for the token provider. + /// + public String Issuer + { + get; + private set; + } + + /// + /// Gets the realm for the token provider. + /// + public String Realm + { + get; + private set; + } + + protected internal override Boolean IsAuthenticationChallenge(IHttpResponse webResponse) + { + if (!base.IsAuthenticationChallenge(webResponse)) + { + return false; + } + + // This means we were proactively constructed without any connection information. In this case + // we return false to ensure that a new provider is reconstructed with all appropriate configuration + // to retrieve a new token. + if (this.SignInUrl == null) + { + return false; + } + + String realm, issuer; + VssFederatedCredential.GetRealmAndIssuer(webResponse, out realm, out issuer); + + return this.Realm.Equals(realm, StringComparison.OrdinalIgnoreCase) && + this.Issuer.Equals(issuer, StringComparison.OrdinalIgnoreCase); + } + + protected override IssuedToken OnValidatingToken( + IssuedToken token, + IHttpResponse webResponse) + { + // If the response has Set-Cookie headers, attempt to retrieve the FedAuth cookie from the response + // and replace the current token with the new FedAuth cookie. Note that the server only reissues the + // FedAuth cookie if it is issued for more than an hour. 
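+ // If the response carries no FedAuth Set-Cookie headers, GetFederatedCookies returns null and
+ // the original token is returned unchanged.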
+ CookieCollection fedAuthCookies = CookieUtility.GetFederatedCookies(webResponse); + + if (fedAuthCookies != null) + { + // The reissued token should have the same user information as the previous one. + VssFederatedToken federatedToken = new VssFederatedToken(fedAuthCookies) + { + Properties = token.Properties, + UserId = token.UserId, + UserName = token.UserName + }; + + token = federatedToken; + } + + return token; + } + + public void SignOut(Uri signOutUrl, Uri replyToUrl, String identityProvider) + { + // The preferred implementation is to follow the signOutUrl with a browser and kill the browser whenever it + // arrives at the replyToUrl (or if it bombs out somewhere along the way). + // This will work for all Web-based identity providers (Live, Google, Yahoo, Facebook) supported by ACS provided that + // the TFS server has registered sign-out urls (in the TF Registry) for each of these. + // This is the long-term approach that should be pursued and probably the approach recommended to other + // clients which don't have direct access to the cookie store (TEE?) + + // In the short term we are simply going to delete the TFS cookies and the Windows Live cookies that are exposed to this + // session. This has the drawback of not properly signing out of Live (you'd still be signed in to e.g. Hotmail, Xbox, MSN, etc.) + // but will allow the user to re-enter their live credentials and sign-in again to TFS. + // The other drawback is that the clients will have to be updated again when we pursue the implementation outlined above. + + CookieUtility.DeleteFederatedCookies(replyToUrl); + if (!String.IsNullOrEmpty(identityProvider) && identityProvider.Equals("Windows Live ID", StringComparison.OrdinalIgnoreCase)) + { + CookieUtility.DeleteWindowsLiveCookies(); + } + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/AgentTargetExecutionType.cs b/src/Sdk/BuildWebApi/Api/AgentTargetExecutionType.cs new file mode 100644 index 00000000000..b2dc96d564c --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/AgentTargetExecutionType.cs @@ -0,0 +1,12 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi { + + [GenerateAllConstants] + public static class AgentTargetExecutionType { + public const Int32 Normal = 0; + public const Int32 VariableMultipliers = 1; + public const Int32 MultipleAgents = 2; + } +} diff --git a/src/Sdk/BuildWebApi/Api/ArtifactResourceTypes.cs b/src/Sdk/BuildWebApi/Api/ArtifactResourceTypes.cs new file mode 100644 index 00000000000..c9480e87fb5 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/ArtifactResourceTypes.cs @@ -0,0 +1,76 @@ +using System; +using System.ComponentModel; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [Obsolete("Use ArtifactResourceTypes instead.")] + [EditorBrowsable(EditorBrowsableState.Never)] + public static class WellKnownArtifactResourceTypes + { + public const String FilePath = ArtifactResourceTypes.FilePath; + public const String SymbolStore = ArtifactResourceTypes.SymbolStore; + public const String VersionControl = ArtifactResourceTypes.VersionControl; + public const String Container = ArtifactResourceTypes.Container; + public const String GitRef = ArtifactResourceTypes.GitRef; + public const String TfvcLabel = ArtifactResourceTypes.TfvcLabel; + public const String SymbolRequest = ArtifactResourceTypes.SymbolRequest; + } + + [GenerateAllConstants] + public static class ArtifactResourceTypes + { + /// + /// UNC or local folder path + /// E.g. 
\\vscsstor\CIDrops\CloudU.Gated\140317.115955 or file://vscsstor/CIDrops/CloudU.Gated/140317.115955 + /// + public const String FilePath = "FilePath"; + + /// + /// Symbol store UNC path + /// E.g. \\symbolstore + /// + public const String SymbolStore = "SymbolStore"; + + /// + /// TF VC server folder path + /// E.g. $/Dev1/Drops/CloudU.Gated/140317.115955 + /// + public const String VersionControl = "VersionControl"; + + /// + /// Build container reference + /// E.g. #/2121/drop + /// + public const String Container = "Container"; + + /// + /// Git ref + /// E.g. refs/tags/MyCIDefinition.Buildable + /// + public const String GitRef = "GitRef"; + + /// + /// TFVC label + /// + public const String TfvcLabel = "TfvcLabel"; + + /// + /// Symbol store URL + /// E.g. https://mseng.artifacts.visualstudio.com/... + /// + public const String SymbolRequest = "SymbolRequest"; + + /// + /// Dedup Drop (old name fo PipelineArtifact) + /// E.g. drop1 + /// + public const String Drop = "Drop"; + + /// + /// Dedup'ed pipeline artifact + /// E.g. artifact1 + /// + public const String PipelineArtifact = "PipelineArtifact"; + } +} diff --git a/src/Sdk/BuildWebApi/Api/BuildDefinitionExtensions.cs b/src/Sdk/BuildWebApi/Api/BuildDefinitionExtensions.cs new file mode 100644 index 00000000000..756d3892295 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/BuildDefinitionExtensions.cs @@ -0,0 +1,17 @@ +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + public static class BuildDefinitionExtensions + { + public static T GetProcess( + this BuildDefinition definition) where T : BuildProcess + { + ArgumentUtility.CheckForNull(definition, nameof(definition)); + ArgumentUtility.CheckForNull(definition.Process, nameof(definition.Process)); + ArgumentUtility.CheckType(definition.Process, nameof(definition.Process), nameof(T)); + + return definition.Process as T; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/BuildDefinitionHelpers.cs b/src/Sdk/BuildWebApi/Api/BuildDefinitionHelpers.cs new file mode 100644 index 00000000000..c323b003be6 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/BuildDefinitionHelpers.cs @@ -0,0 +1,44 @@ +using System; +using System.IO; +using System.Text; +using GitHub.Build.WebApi.Internals; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.Build.WebApi +{ + public static class BuildDefinitionHelpers + { + public static BuildDefinition Deserialize( + String definitionString) + { + var definition = JsonUtility.FromString(definitionString); + if (definition?.Process == null) + { + var legacyDefinition = JsonConvert.DeserializeObject(definitionString); + definition = legacyDefinition.ToBuildDefinition(); + } + + return definition; + } + + public static BuildDefinitionTemplate GetTemplateFromStream( + Stream stream) + { + String templateString; + using (var reader = new StreamReader(stream, Encoding.UTF8, false, 1024, true)) + { + templateString = reader.ReadToEnd(); + } + + var template = JsonConvert.DeserializeObject(templateString); + if (template?.Template?.Process == null) + { + var legacyTemplate = JsonConvert.DeserializeObject(templateString); + template = legacyTemplate.ToBuildDefinitionTemplate(); + } + + return template; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/BuildHttpClient.cs b/src/Sdk/BuildWebApi/Api/BuildHttpClient.cs new file mode 100644 index 00000000000..f5f1f9014f3 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/BuildHttpClient.cs @@ -0,0 +1,1489 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; 
+using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Patch; +using GitHub.Services.WebApi.Patch.Json; + +namespace GitHub.Build.WebApi +{ + public class BuildHttpClient : BuildHttpClientBase + { + static BuildHttpClient() + { + } + + public BuildHttpClient( + Uri baseUrl, + VssCredentials credentials) + : base(baseUrl, credentials) + { + } + + public BuildHttpClient( + Uri baseUrl, + VssCredentials credentials, + VssHttpRequestSettings settings) + : base(baseUrl, credentials, settings) + { + } + + public BuildHttpClient( + Uri baseUrl, + VssCredentials credentials, + params DelegatingHandler[] handlers) + : base(baseUrl, credentials, handlers) + { + } + + public BuildHttpClient( + Uri baseUrl, + VssCredentials credentials, + VssHttpRequestSettings settings, + params DelegatingHandler[] handlers) + : base(baseUrl, credentials, settings, handlers) + { + } + + public BuildHttpClient( + Uri baseUrl, + HttpMessageHandler pipeline, + Boolean disposeHandler) + : base(baseUrl, pipeline, disposeHandler) + { + } + + /// + /// [Preview API] Creates a new definition. + /// + /// The definition. + /// + /// + /// + /// The cancellation token to cancel operation. + // this is just a convenient helper that uses the project id from the definition to call the API + public virtual Task CreateDefinitionAsync( + BuildDefinition definition, + Int32? definitionToCloneId = null, + Int32? definitionToCloneRevision = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckForNull(definition, "definition"); + ArgumentUtility.CheckForNull(definition.Project, "definition.Project"); + return base.CreateDefinitionAsync(definition, definition.Project.Id, definitionToCloneId, definitionToCloneRevision, userState, cancellationToken); + } + + /// + /// [Preview API] Updates an existing definition. + /// + /// The new version of the defintion. + /// + /// + /// + /// The cancellation token to cancel operation. + // this is just a convenient helper that uses the project id from the definition to call the API + public Task UpdateDefinitionAsync( + BuildDefinition definition, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return base.UpdateDefinitionAsync(definition, definition.Project.Id, definition.Id, null, null, userState, cancellationToken); + } + + /// + /// [Preview API] Queues a build + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + // this is just a convenient helper that uses the project id from the build to call the API + public virtual Task QueueBuildAsync( + Build build, + Boolean? ignoreWarnings = null, + String checkInTicket = null, + Int32? sourceBuildId = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckForNull(build, "build"); + ArgumentUtility.CheckForNull(build.Project, "build.Project"); + return base.QueueBuildAsync(build, build.Project.Id, ignoreWarnings, checkInTicket, sourceBuildId, userState, cancellationToken); + } + + /// + /// [Preview API] Updates a build. + /// + /// The build. + /// + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateBuildAsync( + Build build, + bool? 
retry = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + // the client generator produces methods with projectId and buildId parameters + // since we can get those from the build, they're redundant and potentially ambiguous (who wins?) + // so we generate it with protected access and provide this version that only accepts the Build + ArgumentUtility.CheckForNull(build, "build"); + ArgumentUtility.CheckForNull(build.Project, "build.Project"); + return base.UpdateBuildAsync(build, build.Project.Id, build.Id, retry, userState, cancellationToken); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionsAsync( + string project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return base.GetDefinitionsAsync( + project, + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + false, // includeAllProperties + includeLatestBuilds, + taskIdFilter, + processType, + yamlFilename, + userState, + cancellationToken); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionsAsync( + Guid project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return base.GetDefinitionsAsync( + project, + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + false, // includeAllProperties + includeLatestBuilds, + taskIdFilter, + processType, + yamlFilename, + userState, + cancellationToken); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionsAsync2( + string project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? 
minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + false, // includeAllProperties + includeLatestBuilds, + taskIdFilter, + processType, + yamlFilename); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionsAsync2( + Guid project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + false, // includeAllProperties + includeLatestBuilds, + taskIdFilter, + processType, + yamlFilename); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [Obsolete("Use GetDefinitionsAsync2(string) instead.")] + public virtual Task> GetDefinitionsAsync2( + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? 
processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + false, // includeAllProperties + includeLatestBuilds, + taskIdFilter, + processType, + yamlFilename); + + return SendAsync>( + httpMethod, + locationId, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetFullDefinitionsAsync( + string project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + true, // includeAllProperties + includeLatestBuilds, + taskIdFilter, + processType, + yamlFilename); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetFullDefinitionsAsync( + Guid project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? 
processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + true, // includeAllProperties + includeLatestBuilds, + taskIdFilter, + processType, + yamlFilename); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [Obsolete("Use GetFullDefinitionsAsync(string) instead.")] + public virtual Task> GetFullDefinitionsAsync( + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + true, // includeAllProperties + includeLatestBuilds, + taskIdFilter, + processType, + yamlFilename); + + return SendAsync>( + httpMethod, + locationId, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetFullDefinitionsAsync2( + string project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? 
processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + true, // includeAllProperties + includeLatestBuilds, + taskIdFilter, + processType, + yamlFilename); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetFullDefinitionsAsync2( + Guid project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + true, // includeAllProperties + includeLatestBuilds, + taskIdFilter, + processType, + yamlFilename); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [Obsolete("Use GetFullDefinitionsAsync2(string) instead.")] + public virtual Task> GetFullDefinitionsAsync2( + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? 
processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + true, // includeAllProperties + includeLatestBuilds, + taskIdFilter, + processType, + yamlFilename); + + return SendAsync>( + httpMethod, + locationId, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Gets builds + /// + /// Project ID + /// A comma-delimited list of definition ids + /// A comma-delimited list of queue ids + /// + /// + /// + /// + /// + /// + /// + /// A comma-delimited list of tags + /// A comma-delimited list of properties to include in the results + /// The maximum number of builds to retrieve + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public override Task> GetBuildsAsync( + Guid project, + IEnumerable definitions = null, + IEnumerable queues = null, + string buildNumber = null, + DateTime? minFinishTime = null, + DateTime? maxFinishTime = null, + string requestedFor = null, + BuildReason? reasonFilter = null, + BuildStatus? statusFilter = null, + BuildResult? resultFilter = null, + IEnumerable tagFilters = null, + IEnumerable properties = null, + int? top = null, + string continuationToken = null, + int? maxBuildsPerDefinition = null, + QueryDeletedOption? deletedFilter = null, + BuildQueryOrder? queryOrder = null, + string branchName = null, + IEnumerable buildIds = null, + string repositoryId = null, + string repositoryType = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + object routeValues = new { project = project }; + + List> queryParams = GetBuildsQueryParams( + definitions: definitions, + queues: queues, + buildNumber: buildNumber, + minTime: minFinishTime, + maxTime: maxFinishTime, + requestedFor: requestedFor, + reasonFilter: reasonFilter, + statusFilter: statusFilter, + resultFilter: resultFilter, + tagFilters: tagFilters, + properties: properties, + top: top, + continuationToken: continuationToken, + maxBuildsPerDefinition: maxBuildsPerDefinition, + deletedFilter: deletedFilter, + queryOrder: queryOrder, + branchName: branchName, + buildIds: buildIds, + repositoryId: repositoryId, + repositoryType: repositoryType, + userState: userState, + cancellationToken: cancellationToken); + + return SendAsync>( + httpMethod, + s_getBuildsLocationId, + routeValues: routeValues, + version: s_BuildsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken + ); + } + + public override Task> GetBuildsAsync( + string project, + IEnumerable definitions = null, + IEnumerable queues = null, + string buildNumber = null, + DateTime? minFinishTime = null, + DateTime? maxFinishTime = null, + string requestedFor = null, + BuildReason? reasonFilter = null, + BuildStatus? statusFilter = null, + BuildResult? resultFilter = null, + IEnumerable tagFilters = null, + IEnumerable properties = null, + int? top = null, + string continuationToken = null, + int? 
maxBuildsPerDefinition = null, + QueryDeletedOption? deletedFilter = null, + BuildQueryOrder? queryOrder = null, + string branchName = null, + IEnumerable buildIds = null, + string repositoryId = null, + string repositoryType = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + object routeValues = new { project = project }; + + List> queryParams = GetBuildsQueryParams( + definitions: definitions, + queues: queues, + buildNumber: buildNumber, + minTime: minFinishTime, + maxTime: maxFinishTime, + requestedFor: requestedFor, + reasonFilter: reasonFilter, + statusFilter: statusFilter, + resultFilter: resultFilter, + tagFilters: tagFilters, + properties: properties, + top: top, + continuationToken: continuationToken, + maxBuildsPerDefinition: maxBuildsPerDefinition, + deletedFilter: deletedFilter, + queryOrder: queryOrder, + branchName: branchName, + buildIds: buildIds, + repositoryId: repositoryId, + repositoryType: repositoryType, + userState: userState, + cancellationToken: cancellationToken); + + return SendAsync>( + httpMethod, + s_getBuildsLocationId, + routeValues: routeValues, + version: s_BuildsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken + ); + } + + /// + /// [Preview API] Gets builds + /// + /// Project ID + /// A comma-delimited list of definition ids + /// A comma-delimited list of queue ids + /// + /// + /// + /// + /// + /// + /// + /// A comma-delimited list of tags + /// A comma-delimited list of properties to include in the results + /// The maximum number of builds to retrieve + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildsAsync2( + Guid project, + IEnumerable definitions = null, + IEnumerable queues = null, + string buildNumber = null, + DateTime? minFinishTime = null, + DateTime? maxFinishTime = null, + string requestedFor = null, + BuildReason? reasonFilter = null, + BuildStatus? statusFilter = null, + BuildResult? resultFilter = null, + IEnumerable tagFilters = null, + IEnumerable properties = null, + int? top = null, + string continuationToken = null, + int? maxBuildsPerDefinition = null, + QueryDeletedOption? deletedFilter = null, + BuildQueryOrder? 
queryOrder = null, + string branchName = null, + IEnumerable buildIds = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + object routeValues = new { project = project }; + + List> queryParams = GetBuildsQueryParams( + definitions: definitions, + queues: queues, + buildNumber: buildNumber, + minTime: minFinishTime, + maxTime: maxFinishTime, + requestedFor: requestedFor, + reasonFilter: reasonFilter, + statusFilter: statusFilter, + resultFilter: resultFilter, + tagFilters: tagFilters, + properties: properties, + top: top, + continuationToken: continuationToken, + maxBuildsPerDefinition: maxBuildsPerDefinition, + deletedFilter: deletedFilter, + queryOrder: queryOrder, + branchName: branchName, + buildIds: buildIds, + userState: userState, + cancellationToken: cancellationToken); + + return SendAsync>( + httpMethod, + s_getBuildsLocationId, + routeValues: routeValues, + version: s_BuildsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + public virtual Task> GetBuildsAsync2( + string project, + IEnumerable definitions = null, + IEnumerable queues = null, + string buildNumber = null, + DateTime? minFinishTime = null, + DateTime? maxFinishTime = null, + string requestedFor = null, + BuildReason? reasonFilter = null, + BuildStatus? statusFilter = null, + BuildResult? resultFilter = null, + IEnumerable tagFilters = null, + IEnumerable properties = null, + int? top = null, + string continuationToken = null, + int? maxBuildsPerDefinition = null, + QueryDeletedOption? deletedFilter = null, + BuildQueryOrder? queryOrder = null, + string branchName = null, + IEnumerable buildIds = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + object routeValues = new { project = project }; + + List> queryParams = GetBuildsQueryParams( + definitions: definitions, + queues: queues, + buildNumber: buildNumber, + minTime: minFinishTime, + maxTime: maxFinishTime, + requestedFor: requestedFor, + reasonFilter: reasonFilter, + statusFilter: statusFilter, + resultFilter: resultFilter, + tagFilters: tagFilters, + properties: properties, + top: top, + continuationToken: continuationToken, + maxBuildsPerDefinition: maxBuildsPerDefinition, + deletedFilter: deletedFilter, + queryOrder: queryOrder, + branchName: branchName, + buildIds: buildIds, + userState: userState, + cancellationToken: cancellationToken); + + return SendAsync>( + httpMethod, + s_getBuildsLocationId, + routeValues: routeValues, + version: s_BuildsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Gets builds + /// + /// A comma-delimited list of definition ids + /// A comma-delimited list of queue ids + /// + /// + /// + /// + /// + /// + /// + /// A comma-delimited list of tags + /// A comma-delimited list of properties to include in the results + /// The maximum number of builds to retrieve + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildsAsync2( + IEnumerable definitions = null, + IEnumerable queues = null, + string buildNumber = null, + DateTime? minFinishTime = null, + DateTime? maxFinishTime = null, + string requestedFor = null, + BuildReason? 
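// A consumption sketch for the paged GetBuildsAsync2 overloads above; illustrative only, not part
// of the patch. It assumes the IPagedList<Build> returned here exposes the next-page token as
// ContinuationToken, as in the VSS WebApi paged list; `client` and `project` are placeholders.
static async Task<List<Build>> GetAllBuildsAsync(BuildHttpClient client, Guid project)
{
    var builds = new List<Build>();
    string continuationToken = null;
    do
    {
        // Each page carries the token for the next page; it is null or empty once exhausted.
        IPagedList<Build> page = await client.GetBuildsAsync2(project, continuationToken: continuationToken);
        builds.AddRange(page);
        continuationToken = page.ContinuationToken;
    }
    while (!string.IsNullOrEmpty(continuationToken));
    return builds;
}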
reasonFilter = null, + BuildStatus? statusFilter = null, + BuildResult? resultFilter = null, + IEnumerable tagFilters = null, + IEnumerable properties = null, + int? top = null, + string continuationToken = null, + int? maxBuildsPerDefinition = null, + QueryDeletedOption? deletedFilter = null, + BuildQueryOrder? queryOrder = null, + string branchName = null, + IEnumerable buildIds = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + + List> queryParams = GetBuildsQueryParams( + definitions: definitions, + queues: queues, + buildNumber: buildNumber, + minTime: minFinishTime, + maxTime: maxFinishTime, + requestedFor: requestedFor, + reasonFilter: reasonFilter, + statusFilter: statusFilter, + resultFilter: resultFilter, + tagFilters: tagFilters, + properties: properties, + top: top, + continuationToken: continuationToken, + maxBuildsPerDefinition: maxBuildsPerDefinition, + deletedFilter: deletedFilter, + queryOrder: queryOrder, + branchName: branchName, + buildIds: buildIds, + userState: userState, + cancellationToken: cancellationToken); + + return SendAsync>( + httpMethod, + s_getBuildsLocationId, + version: s_BuildsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + + /// + /// [Preview API] The changes associated with a build + /// + /// Project ID or project name + /// + /// + /// The maximum number of changes to return + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildChangesAsync2( + string project, + int buildId, + string continuationToken = null, + int? top = null, + bool? includeSourceChange = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("54572c7b-bbd3-45d4-80dc-28be08941620"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (includeSourceChange != null) + { + queryParams.Add("includeSourceChange", includeSourceChange.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_ChangesApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList); + } + + /// + /// [Preview API] The changes associated with a build + /// + /// Project ID + /// + /// + /// The maximum number of changes to return + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildChangesAsync2( + Guid project, + int buildId, + string continuationToken = null, + int? top = null, + bool? 
includeSourceChange = null,
+            object userState = null,
+            CancellationToken cancellationToken = default(CancellationToken))
+        {
+            HttpMethod httpMethod = new HttpMethod("GET");
+            Guid locationId = new Guid("54572c7b-bbd3-45d4-80dc-28be08941620");
+            object routeValues = new { project = project, buildId = buildId };
+
+            List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
+            if (!string.IsNullOrEmpty(continuationToken))
+            {
+                queryParams.Add("continuationToken", continuationToken);
+            }
+            if (top != null)
+            {
+                queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture));
+            }
+            if (includeSourceChange != null)
+            {
+                queryParams.Add("includeSourceChange", includeSourceChange.Value.ToString());
+            }
+
+            return SendAsync<IPagedList<Change>>(
+                httpMethod,
+                locationId,
+                routeValues: routeValues,
+                version: s_ChangesApiVersion,
+                queryParameters: queryParams,
+                userState: userState,
+                cancellationToken: cancellationToken,
+                processResponse: GetPagedList);
+        }
+
+        // this method does a compat check to see whether the server uses "minFinishTime" and "maxFinishTime" vs. "minTime" and "maxTime"
+        private List<KeyValuePair<string, string>> GetBuildsQueryParams(
+            IEnumerable<int> definitions = null,
+            IEnumerable<int> queues = null,
+            string buildNumber = null,
+            DateTime? minTime = null,
+            DateTime? maxTime = null,
+            string requestedFor = null,
+            BuildReason? reasonFilter = null,
+            BuildStatus? statusFilter = null,
+            BuildResult? resultFilter = null,
+            IEnumerable<string> tagFilters = null,
+            IEnumerable<string> properties = null,
+            int? top = null,
+            string continuationToken = null,
+            int? maxBuildsPerDefinition = null,
+            QueryDeletedOption? deletedFilter = null,
+            BuildQueryOrder? queryOrder = null,
+            string branchName = null,
+            IEnumerable<int> buildIds = null,
+            string repositoryId = null,
+            string repositoryType = null,
+            object userState = null,
+            CancellationToken cancellationToken = default(CancellationToken))
+        {
+            List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
+
+            // default to false to keep the existing behavior
+            Boolean version4_1IsNotAvailable = true;
+
+            // get latest version available on server
+            // note we could await here and change all methods to async, however just for this one call, async/await overhead is probably not worth it, reconsider if we have more async calls
+            ApiResourceLocation location = GetResourceLocationAsync(s_getBuildsLocationId, userState, cancellationToken).ConfigureAwait(false).GetAwaiter().GetResult();
+            if (location != null)
+            {
+                if (location.MaxVersion >= s_BuildsApiVersion.ApiVersion)
+                {
+                    // server has client's requested version 4.1 or greater
+                    version4_1IsNotAvailable = false;
+                }
+            }
+
+            // in case the server doesn't support 3.x yet
+            queryParams.Add("type", DefinitionType.Build.ToString());
+
+            if (definitions != null && definitions.Any())
+            {
+                queryParams.Add("definitions", string.Join(",", definitions));
+            }
+            if (queues != null && queues.Any())
+            {
+                queryParams.Add("queues", string.Join(",", queues));
+            }
+            if (!string.IsNullOrEmpty(buildNumber))
+            {
+                queryParams.Add("buildNumber", buildNumber);
+            }
+
+            if (version4_1IsNotAvailable)
+            {
+                if (minTime != null)
+                {
+                    AddDateTimeToQueryParams(queryParams, "minFinishTime", minTime.Value);
+                }
+                if (maxTime != null)
+                {
+                    AddDateTimeToQueryParams(queryParams, "maxFinishTime", maxTime.Value);
+                }
+            }
+            else
+            {
+                if (minTime != null)
+                {
+                    AddDateTimeToQueryParams(queryParams, "minTime", minTime.Value);
+                }
+                if (maxTime != null)
+                {
+                    AddDateTimeToQueryParams(queryParams, "maxTime", maxTime.Value);
+                }
+            }
+
+            if (!string.IsNullOrEmpty(requestedFor))
+            {
+                queryParams.Add("requestedFor", requestedFor);
+            }
+            if (reasonFilter != null)
+            {
+                queryParams.Add("reasonFilter", reasonFilter.Value.ToString());
+            }
+            if (statusFilter != null)
+            {
+                queryParams.Add("statusFilter", statusFilter.Value.ToString());
+            }
+            if (resultFilter != null)
+            {
+                queryParams.Add("resultFilter", resultFilter.Value.ToString());
+            }
+            if (tagFilters != null && tagFilters.Any())
+            {
+                queryParams.Add("tagFilters", string.Join(",", tagFilters));
+            }
+            if (properties != null && properties.Any())
+            {
+                queryParams.Add("properties", string.Join(",", properties));
+            }
+            if (top != null)
+            {
+                queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture));
+            }
+            if (!string.IsNullOrEmpty(continuationToken))
+            {
+                queryParams.Add("continuationToken", continuationToken);
+            }
+            if (maxBuildsPerDefinition != null)
+            {
+                queryParams.Add("maxBuildsPerDefinition", maxBuildsPerDefinition.Value.ToString(CultureInfo.InvariantCulture));
+            }
+            if (deletedFilter != null)
+            {
+                queryParams.Add("deletedFilter", deletedFilter.Value.ToString());
+            }
+            if (queryOrder != null)
+            {
+                queryParams.Add("queryOrder", queryOrder.Value.ToString());
+            }
+            if (!string.IsNullOrEmpty(branchName))
+            {
+                queryParams.Add("branchName", branchName);
+            }
+            if (buildIds != null && buildIds.Any())
+            {
+                queryParams.Add("buildIds", string.Join(",", buildIds));
+            }
+            if (!string.IsNullOrEmpty(repositoryId))
+            {
+                queryParams.Add("repositoryId", repositoryId);
+            }
+            if (!string.IsNullOrEmpty(repositoryType))
+            {
+                queryParams.Add("repositoryType", repositoryType);
+            }
+
+            return queryParams;
+        }
+
+        private string NormalizeJsonPatchPath(string key)
+        {
+            const string JsonPatchPathStartString = "/";
+            if (key.StartsWith(JsonPatchPathStartString))
+            {
+                return key;
+            }
+
+            return string.Format("{0}{1}", JsonPatchPathStartString, key);
+        }
+
+        private static readonly ApiResourceVersion s_ChangesApiVersion = new ApiResourceVersion("4.1-preview.2");
+        private static readonly Guid s_getBuildsLocationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf");
+    }
+}
diff --git a/src/Sdk/BuildWebApi/Api/BuildHttpClientCompatBase.cs b/src/Sdk/BuildWebApi/Api/BuildHttpClientCompatBase.cs
new file mode 100644
index 00000000000..7b83a7236e2
--- /dev/null
+++ b/src/Sdk/BuildWebApi/Api/BuildHttpClientCompatBase.cs
@@ -0,0 +1,2463 @@
+using System;
+using System.Collections.Generic;
+using System.Globalization;
+using System.Linq;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using GitHub.Services.Common;
+using GitHub.Services.Common.Diagnostics;
+using GitHub.Services.WebApi;
+
+namespace GitHub.Build.WebApi
+{
+    public abstract class BuildHttpClientCompatBase: VssHttpClientBase
+    {
+        public BuildHttpClientCompatBase(Uri baseUrl, VssCredentials credentials)
+            : base(baseUrl, credentials)
+        {
+        }
+
+        public BuildHttpClientCompatBase(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings)
+            : base(baseUrl, credentials, settings)
+        {
+        }
+
+        public BuildHttpClientCompatBase(Uri baseUrl, VssCredentials credentials, params DelegatingHandler[] handlers)
+            : base(baseUrl, credentials, handlers)
+        {
+        }
+
+        public BuildHttpClientCompatBase(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings, params DelegatingHandler[] handlers)
+            : base(baseUrl, credentials, settings, handlers)
+        {
+        }
+
+        public BuildHttpClientCompatBase(Uri baseUrl, HttpMessageHandler pipeline, bool disposeHandler)
+            : base(baseUrl, pipeline, disposeHandler)
+        {
+        }
+
+        [Obsolete]
+        public virtual Task<List<Build>> GetBuildsAsync(
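// A condensed sketch of the time-filter negotiation performed in GetBuildsQueryParams above;
// illustrative only, not part of the patch. The helper name and tuple shape are assumptions.
static (string minName, string maxName) TimeFilterNames(ApiResourceLocation location, ApiResourceVersion requested)
{
    // Servers that advertise the requested (4.1+) resource version take minTime/maxTime;
    // older servers only understand the legacy minFinishTime/maxFinishTime names.
    bool requestedVersionAvailable = location != null && location.MaxVersion >= requested.ApiVersion;
    return requestedVersionAvailable
        ? ("minTime", "maxTime")
        : ("minFinishTime", "maxFinishTime");
}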
+ string project, + IEnumerable definitions = null, + IEnumerable queues = null, + string buildNumber = null, + DateTime? minFinishTime = null, + DateTime? maxFinishTime = null, + string requestedFor = null, + BuildReason? reasonFilter = null, + BuildStatus? statusFilter = null, + BuildResult? resultFilter = null, + IEnumerable tagFilters = null, + IEnumerable properties = null, + int? top = null, + string continuationToken = null, + int? maxBuildsPerDefinition = null, + QueryDeletedOption? deletedFilter = null, + BuildQueryOrder? queryOrder = null, + string branchName = null, + IEnumerable buildIds = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (definitions != null && definitions.Any()) + { + queryParams.Add("definitions", string.Join(",", definitions)); + } + if (queues != null && queues.Any()) + { + queryParams.Add("queues", string.Join(",", queues)); + } + if (!string.IsNullOrEmpty(buildNumber)) + { + queryParams.Add("buildNumber", buildNumber); + } + if (minFinishTime != null) + { + AddDateTimeToQueryParams(queryParams, "minFinishTime", minFinishTime.Value); + } + if (maxFinishTime != null) + { + AddDateTimeToQueryParams(queryParams, "maxFinishTime", maxFinishTime.Value); + } + if (!string.IsNullOrEmpty(requestedFor)) + { + queryParams.Add("requestedFor", requestedFor); + } + if (reasonFilter != null) + { + queryParams.Add("reasonFilter", reasonFilter.Value.ToString()); + } + if (statusFilter != null) + { + queryParams.Add("statusFilter", statusFilter.Value.ToString()); + } + if (resultFilter != null) + { + queryParams.Add("resultFilter", resultFilter.Value.ToString()); + } + if (tagFilters != null && tagFilters.Any()) + { + queryParams.Add("tagFilters", string.Join(",", tagFilters)); + } + if (properties != null && properties.Any()) + { + queryParams.Add("properties", string.Join(",", properties)); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (maxBuildsPerDefinition != null) + { + queryParams.Add("maxBuildsPerDefinition", maxBuildsPerDefinition.Value.ToString(CultureInfo.InvariantCulture)); + } + if (deletedFilter != null) + { + queryParams.Add("deletedFilter", deletedFilter.Value.ToString()); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (!string.IsNullOrEmpty(branchName)) + { + queryParams.Add("branchName", branchName); + } + if (buildIds != null && buildIds.Any()) + { + queryParams.Add("buildIds", string.Join(",", buildIds)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_BuildsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Queues a build + /// + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task QueueBuildAsync( + Build build, + Guid project, + bool? 
ignoreWarnings = null, + string checkInTicket = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(build, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (ignoreWarnings != null) + { + queryParams.Add("ignoreWarnings", ignoreWarnings.Value.ToString()); + } + if (!string.IsNullOrEmpty(checkInTicket)) + { + queryParams.Add("checkInTicket", checkInTicket); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.0-preview.4"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Queues a build + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task QueueBuildAsync( + Build build, + bool? ignoreWarnings = null, + string checkInTicket = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + HttpContent content = new ObjectContent(build, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (ignoreWarnings != null) + { + queryParams.Add("ignoreWarnings", ignoreWarnings.Value.ToString()); + } + if (!string.IsNullOrEmpty(checkInTicket)) + { + queryParams.Add("checkInTicket", checkInTicket); + } + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion("5.0-preview.4"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Queues a build + /// + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task QueueBuildAsync( + Build build, + string project, + bool? ignoreWarnings = null, + string checkInTicket = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(build, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (ignoreWarnings != null) + { + queryParams.Add("ignoreWarnings", ignoreWarnings.Value.ToString()); + } + if (!string.IsNullOrEmpty(checkInTicket)) + { + queryParams.Add("checkInTicket", checkInTicket); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.0-preview.4"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Gets builds + /// + /// Project ID + /// A comma-delimited list of definition ids + /// A comma-delimited list of queue ids + /// + /// + /// + /// + /// + /// + /// + /// A comma-delimited list of tags + /// A comma-delimited list of properties to include in the results + /// The maximum number of builds to retrieve + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
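// A usage sketch for the QueueBuildAsync overloads above; illustrative only, not part of the
// patch. `client`, `projectId` and `definitionId` are placeholders, and the Build and
// DefinitionReference shapes are assumed to match the Build.WebApi contracts used in this SDK.
static Task<Build> QueueAsync(BuildHttpClient client, Guid projectId, int definitionId)
{
    var build = new Build
    {
        Definition = new DefinitionReference { Id = definitionId }
    };

    // ignoreWarnings asks the server to queue even if it would otherwise report warnings.
    return client.QueueBuildAsync(build, projectId, ignoreWarnings: true);
}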
+ public virtual Task> GetBuildsAsync( + Guid project, + IEnumerable definitions = null, + IEnumerable queues = null, + string buildNumber = null, + DateTime? minFinishTime = null, + DateTime? maxFinishTime = null, + string requestedFor = null, + BuildReason? reasonFilter = null, + BuildStatus? statusFilter = null, + BuildResult? resultFilter = null, + IEnumerable tagFilters = null, + IEnumerable properties = null, + int? top = null, + string continuationToken = null, + int? maxBuildsPerDefinition = null, + QueryDeletedOption? deletedFilter = null, + BuildQueryOrder? queryOrder = null, + string branchName = null, + IEnumerable buildIds = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (definitions != null && definitions.Any()) + { + queryParams.Add("definitions", string.Join(",", definitions)); + } + if (queues != null && queues.Any()) + { + queryParams.Add("queues", string.Join(",", queues)); + } + if (!string.IsNullOrEmpty(buildNumber)) + { + queryParams.Add("buildNumber", buildNumber); + } + if (minFinishTime != null) + { + AddDateTimeToQueryParams(queryParams, "minFinishTime", minFinishTime.Value); + } + if (maxFinishTime != null) + { + AddDateTimeToQueryParams(queryParams, "maxFinishTime", maxFinishTime.Value); + } + if (!string.IsNullOrEmpty(requestedFor)) + { + queryParams.Add("requestedFor", requestedFor); + } + if (reasonFilter != null) + { + queryParams.Add("reasonFilter", reasonFilter.Value.ToString()); + } + if (statusFilter != null) + { + queryParams.Add("statusFilter", statusFilter.Value.ToString()); + } + if (resultFilter != null) + { + queryParams.Add("resultFilter", resultFilter.Value.ToString()); + } + if (tagFilters != null && tagFilters.Any()) + { + queryParams.Add("tagFilters", string.Join(",", tagFilters)); + } + if (properties != null && properties.Any()) + { + queryParams.Add("properties", string.Join(",", properties)); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (maxBuildsPerDefinition != null) + { + queryParams.Add("maxBuildsPerDefinition", maxBuildsPerDefinition.Value.ToString(CultureInfo.InvariantCulture)); + } + if (deletedFilter != null) + { + queryParams.Add("deletedFilter", deletedFilter.Value.ToString()); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (!string.IsNullOrEmpty(branchName)) + { + queryParams.Add("branchName", branchName); + } + if (buildIds != null && buildIds.Any()) + { + queryParams.Add("buildIds", string.Join(",", buildIds)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_BuildsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets builds + /// + /// A comma-delimited list of definition ids + /// A comma-delimited list of queue ids + /// + /// + /// + /// + /// + /// + /// + /// A comma-delimited list of tags + /// A comma-delimited list of properties to include in the results + /// The maximum number of builds to retrieve + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel 
operation. + public virtual Task> GetBuildsAsync( + IEnumerable definitions = null, + IEnumerable queues = null, + string buildNumber = null, + DateTime? minFinishTime = null, + DateTime? maxFinishTime = null, + string requestedFor = null, + BuildReason? reasonFilter = null, + BuildStatus? statusFilter = null, + BuildResult? resultFilter = null, + IEnumerable tagFilters = null, + IEnumerable properties = null, + int? top = null, + string continuationToken = null, + int? maxBuildsPerDefinition = null, + QueryDeletedOption? deletedFilter = null, + BuildQueryOrder? queryOrder = null, + string branchName = null, + IEnumerable buildIds = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + + List> queryParams = new List>(); + if (definitions != null && definitions.Any()) + { + queryParams.Add("definitions", string.Join(",", definitions)); + } + if (queues != null && queues.Any()) + { + queryParams.Add("queues", string.Join(",", queues)); + } + if (!string.IsNullOrEmpty(buildNumber)) + { + queryParams.Add("buildNumber", buildNumber); + } + if (minFinishTime != null) + { + AddDateTimeToQueryParams(queryParams, "minFinishTime", minFinishTime.Value); + } + if (maxFinishTime != null) + { + AddDateTimeToQueryParams(queryParams, "maxFinishTime", maxFinishTime.Value); + } + if (!string.IsNullOrEmpty(requestedFor)) + { + queryParams.Add("requestedFor", requestedFor); + } + if (reasonFilter != null) + { + queryParams.Add("reasonFilter", reasonFilter.Value.ToString()); + } + if (statusFilter != null) + { + queryParams.Add("statusFilter", statusFilter.Value.ToString()); + } + if (resultFilter != null) + { + queryParams.Add("resultFilter", resultFilter.Value.ToString()); + } + if (tagFilters != null && tagFilters.Any()) + { + queryParams.Add("tagFilters", string.Join(",", tagFilters)); + } + if (properties != null && properties.Any()) + { + queryParams.Add("properties", string.Join(",", properties)); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (maxBuildsPerDefinition != null) + { + queryParams.Add("maxBuildsPerDefinition", maxBuildsPerDefinition.Value.ToString(CultureInfo.InvariantCulture)); + } + if (deletedFilter != null) + { + queryParams.Add("deletedFilter", deletedFilter.Value.ToString()); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (!string.IsNullOrEmpty(branchName)) + { + queryParams.Add("branchName", branchName); + } + if (buildIds != null && buildIds.Any()) + { + queryParams.Add("buildIds", string.Join(",", buildIds)); + } + + return SendAsync>( + httpMethod, + locationId, + version: s_BuildsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a definition, optionally at a specific revision + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetDefinitionAsync( + string project, + int definitionId, + int? 
revision = null, + IEnumerable propertyFilters = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + if (revision != null) + { + queryParams.Add("revision", revision.Value.ToString(CultureInfo.InvariantCulture)); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a definition, optionally at a specific revision + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetDefinitionAsync( + Guid project, + int definitionId, + int? revision = null, + IEnumerable propertyFilters = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + if (revision != null) + { + queryParams.Add("revision", revision.Value.ToString(CultureInfo.InvariantCulture)); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a definition, optionally at a specific revision + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [Obsolete("Use GetDefinitionAsync(string, int) instead.")] + public virtual Task GetDefinitionAsync( + int definitionId, + int? revision = null, + IEnumerable propertyFilters = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { definitionId = definitionId }; + + List> queryParams = new List>(); + if (revision != null) + { + queryParams.Add("revision", revision.Value.ToString(CultureInfo.InvariantCulture)); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionsAsync( + string project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? 
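// A usage sketch for the GetDefinitionAsync overloads above; illustrative only, not part of the
// patch, with `client` and the ids as placeholders.
static Task<BuildDefinition> GetPinnedDefinitionAsync(BuildHttpClient client, Guid projectId, int definitionId, int revision)
{
    // revision selects a specific saved revision of the definition; omit it to get the latest.
    return client.GetDefinitionAsync(projectId, definitionId, revision: revision);
}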
top = null, + string continuationToken = null, + DateTime? minMetricsTime = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (!string.IsNullOrEmpty(name)) + { + queryParams.Add("name", name); + } + if (!string.IsNullOrEmpty(repositoryId)) + { + queryParams.Add("repositoryId", repositoryId); + } + if (!string.IsNullOrEmpty(repositoryType)) + { + queryParams.Add("repositoryType", repositoryType); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (definitionIds != null && definitionIds.Any()) + { + queryParams.Add("definitionIds", string.Join(",", definitionIds)); + } + if (!string.IsNullOrEmpty(path)) + { + queryParams.Add("path", path); + } + if (builtAfter != null) + { + AddDateTimeToQueryParams(queryParams, "builtAfter", builtAfter.Value); + } + if (notBuiltAfter != null) + { + AddDateTimeToQueryParams(queryParams, "notBuiltAfter", notBuiltAfter.Value); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionsAsync( + Guid project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTime = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? 
notBuiltAfter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (!string.IsNullOrEmpty(name)) + { + queryParams.Add("name", name); + } + if (!string.IsNullOrEmpty(repositoryId)) + { + queryParams.Add("repositoryId", repositoryId); + } + if (!string.IsNullOrEmpty(repositoryType)) + { + queryParams.Add("repositoryType", repositoryType); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (definitionIds != null && definitionIds.Any()) + { + queryParams.Add("definitionIds", string.Join(",", definitionIds)); + } + if (!string.IsNullOrEmpty(path)) + { + queryParams.Add("path", path); + } + if (builtAfter != null) + { + AddDateTimeToQueryParams(queryParams, "builtAfter", builtAfter.Value); + } + if (notBuiltAfter != null) + { + AddDateTimeToQueryParams(queryParams, "notBuiltAfter", notBuiltAfter.Value); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [Obsolete("Use GetDefinitionsAsync(string) instead.")] + public virtual Task> GetDefinitionsAsync( + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTime = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? 
notBuiltAfter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + + List> queryParams = new List>(); + if (!string.IsNullOrEmpty(name)) + { + queryParams.Add("name", name); + } + if (!string.IsNullOrEmpty(repositoryId)) + { + queryParams.Add("repositoryId", repositoryId); + } + if (!string.IsNullOrEmpty(repositoryType)) + { + queryParams.Add("repositoryType", repositoryType); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (definitionIds != null && definitionIds.Any()) + { + queryParams.Add("definitionIds", string.Join(",", definitionIds)); + } + if (!string.IsNullOrEmpty(path)) + { + queryParams.Add("path", path); + } + if (builtAfter != null) + { + AddDateTimeToQueryParams(queryParams, "builtAfter", builtAfter.Value); + } + if (notBuiltAfter != null) + { + AddDateTimeToQueryParams(queryParams, "notBuiltAfter", notBuiltAfter.Value); + } + + return SendAsync>( + httpMethod, + locationId, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + protected virtual Task> GetDefinitionsAsync( + string project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTime = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeAllProperties = null, + bool? 
includeLatestBuilds = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (!string.IsNullOrEmpty(name)) + { + queryParams.Add("name", name); + } + if (!string.IsNullOrEmpty(repositoryId)) + { + queryParams.Add("repositoryId", repositoryId); + } + if (!string.IsNullOrEmpty(repositoryType)) + { + queryParams.Add("repositoryType", repositoryType); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (definitionIds != null && definitionIds.Any()) + { + queryParams.Add("definitionIds", string.Join(",", definitionIds)); + } + if (!string.IsNullOrEmpty(path)) + { + queryParams.Add("path", path); + } + if (builtAfter != null) + { + AddDateTimeToQueryParams(queryParams, "builtAfter", builtAfter.Value); + } + if (notBuiltAfter != null) + { + AddDateTimeToQueryParams(queryParams, "notBuiltAfter", notBuiltAfter.Value); + } + if (includeAllProperties != null) + { + queryParams.Add("includeAllProperties", includeAllProperties.Value.ToString()); + } + if (includeLatestBuilds != null) + { + queryParams.Add("includeLatestBuilds", includeLatestBuilds.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + protected virtual Task> GetDefinitionsAsync( + Guid project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTime = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeAllProperties = null, + bool? 
includeLatestBuilds = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (!string.IsNullOrEmpty(name)) + { + queryParams.Add("name", name); + } + if (!string.IsNullOrEmpty(repositoryId)) + { + queryParams.Add("repositoryId", repositoryId); + } + if (!string.IsNullOrEmpty(repositoryType)) + { + queryParams.Add("repositoryType", repositoryType); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (definitionIds != null && definitionIds.Any()) + { + queryParams.Add("definitionIds", string.Join(",", definitionIds)); + } + if (!string.IsNullOrEmpty(path)) + { + queryParams.Add("path", path); + } + if (builtAfter != null) + { + AddDateTimeToQueryParams(queryParams, "builtAfter", builtAfter.Value); + } + if (notBuiltAfter != null) + { + AddDateTimeToQueryParams(queryParams, "notBuiltAfter", notBuiltAfter.Value); + } + if (includeAllProperties != null) + { + queryParams.Add("includeAllProperties", includeAllProperties.Value.ToString()); + } + if (includeLatestBuilds != null) + { + queryParams.Add("includeLatestBuilds", includeLatestBuilds.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [Obsolete("Use GetDefinitionsAsync(string) instead.")] + protected virtual Task> GetDefinitionsAsync( + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTime = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeAllProperties = null, + bool? 
includeLatestBuilds = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + + List> queryParams = new List>(); + if (!string.IsNullOrEmpty(name)) + { + queryParams.Add("name", name); + } + if (!string.IsNullOrEmpty(repositoryId)) + { + queryParams.Add("repositoryId", repositoryId); + } + if (!string.IsNullOrEmpty(repositoryType)) + { + queryParams.Add("repositoryType", repositoryType); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (definitionIds != null && definitionIds.Any()) + { + queryParams.Add("definitionIds", string.Join(",", definitionIds)); + } + if (!string.IsNullOrEmpty(path)) + { + queryParams.Add("path", path); + } + if (builtAfter != null) + { + AddDateTimeToQueryParams(queryParams, "builtAfter", builtAfter.Value); + } + if (notBuiltAfter != null) + { + AddDateTimeToQueryParams(queryParams, "notBuiltAfter", notBuiltAfter.Value); + } + if (includeAllProperties != null) + { + queryParams.Add("includeAllProperties", includeAllProperties.Value.ToString()); + } + if (includeLatestBuilds != null) + { + queryParams.Add("includeLatestBuilds", includeLatestBuilds.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of definitions. + /// + /// Project ID or project name + /// If specified, filters to definitions whose names match this pattern. + /// A repository ID. If specified, filters to definitions that use this repository. + /// If specified, filters to definitions that have a repository of this type. + /// Indicates the order in which definitions should be returned. + /// The maximum number of definitions to return. + /// A continuation token, returned by a previous call to this method, that can be used to return the next set of definitions. + /// If specified, indicates the date from which metrics should be included. + /// A comma-delimited list that specifies the IDs of definitions to retrieve. + /// If specified, filters to definitions under this folder. + /// If specified, filters to definitions that have builds after this date. + /// If specified, filters to definitions that do not have builds after this date. + /// Indicates whether the full definitions should be returned. By default, shallow representations of the definitions are returned. + /// Indicates whether to return the latest and latest completed builds for this definition. + /// If specified, filters to definitions that use the specified task. + /// + /// The cancellation token to cancel operation. + protected virtual Task> GetDefinitionsAsync( + string project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTime = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? 
builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeAllProperties = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (!string.IsNullOrEmpty(name)) + { + queryParams.Add("name", name); + } + if (!string.IsNullOrEmpty(repositoryId)) + { + queryParams.Add("repositoryId", repositoryId); + } + if (!string.IsNullOrEmpty(repositoryType)) + { + queryParams.Add("repositoryType", repositoryType); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (definitionIds != null && definitionIds.Any()) + { + queryParams.Add("definitionIds", string.Join(",", definitionIds)); + } + if (!string.IsNullOrEmpty(path)) + { + queryParams.Add("path", path); + } + if (builtAfter != null) + { + AddDateTimeToQueryParams(queryParams, "builtAfter", builtAfter.Value); + } + if (notBuiltAfter != null) + { + AddDateTimeToQueryParams(queryParams, "notBuiltAfter", notBuiltAfter.Value); + } + if (includeAllProperties != null) + { + queryParams.Add("includeAllProperties", includeAllProperties.Value.ToString()); + } + if (includeLatestBuilds != null) + { + queryParams.Add("includeLatestBuilds", includeLatestBuilds.Value.ToString()); + } + if (taskIdFilter != null) + { + queryParams.Add("taskIdFilter", taskIdFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.0-preview.6"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of definitions. + /// + /// Project ID + /// If specified, filters to definitions whose names match this pattern. + /// A repository ID. If specified, filters to definitions that use this repository. + /// If specified, filters to definitions that have a repository of this type. + /// Indicates the order in which definitions should be returned. + /// The maximum number of definitions to return. + /// A continuation token, returned by a previous call to this method, that can be used to return the next set of definitions. + /// If specified, indicates the date from which metrics should be included. + /// A comma-delimited list that specifies the IDs of definitions to retrieve. + /// If specified, filters to definitions under this folder. + /// If specified, filters to definitions that have builds after this date. + /// If specified, filters to definitions that do not have builds after this date. + /// Indicates whether the full definitions should be returned. By default, shallow representations of the definitions are returned. + /// Indicates whether to return the latest and latest completed builds for this definition. + /// If specified, filters to definitions that use the specified task. + /// + /// The cancellation token to cancel operation. 
+ protected virtual Task> GetDefinitionsAsync( + Guid project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTime = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeAllProperties = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (!string.IsNullOrEmpty(name)) + { + queryParams.Add("name", name); + } + if (!string.IsNullOrEmpty(repositoryId)) + { + queryParams.Add("repositoryId", repositoryId); + } + if (!string.IsNullOrEmpty(repositoryType)) + { + queryParams.Add("repositoryType", repositoryType); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (definitionIds != null && definitionIds.Any()) + { + queryParams.Add("definitionIds", string.Join(",", definitionIds)); + } + if (!string.IsNullOrEmpty(path)) + { + queryParams.Add("path", path); + } + if (builtAfter != null) + { + AddDateTimeToQueryParams(queryParams, "builtAfter", builtAfter.Value); + } + if (notBuiltAfter != null) + { + AddDateTimeToQueryParams(queryParams, "notBuiltAfter", notBuiltAfter.Value); + } + if (includeAllProperties != null) + { + queryParams.Add("includeAllProperties", includeAllProperties.Value.ToString()); + } + if (includeLatestBuilds != null) + { + queryParams.Add("includeLatestBuilds", includeLatestBuilds.Value.ToString()); + } + if (taskIdFilter != null) + { + queryParams.Add("taskIdFilter", taskIdFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.0-preview.6"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of definitions. + /// + /// If specified, filters to definitions whose names match this pattern. + /// A repository ID. If specified, filters to definitions that use this repository. + /// If specified, filters to definitions that have a repository of this type. + /// Indicates the order in which definitions should be returned. + /// The maximum number of definitions to return. + /// A continuation token, returned by a previous call to this method, that can be used to return the next set of definitions. + /// If specified, indicates the date from which metrics should be included. + /// A comma-delimited list that specifies the IDs of definitions to retrieve. + /// If specified, filters to definitions under this folder. + /// If specified, filters to definitions that have builds after this date. + /// If specified, filters to definitions that do not have builds after this date. + /// Indicates whether the full definitions should be returned. 
By default, shallow representations of the definitions are returned. + /// Indicates whether to return the latest and latest completed builds for this definition. + /// If specified, filters to definitions that use the specified task. + /// + /// The cancellation token to cancel operation. + protected virtual Task> GetDefinitionsAsync( + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTime = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeAllProperties = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + + List> queryParams = new List>(); + if (!string.IsNullOrEmpty(name)) + { + queryParams.Add("name", name); + } + if (!string.IsNullOrEmpty(repositoryId)) + { + queryParams.Add("repositoryId", repositoryId); + } + if (!string.IsNullOrEmpty(repositoryType)) + { + queryParams.Add("repositoryType", repositoryType); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (definitionIds != null && definitionIds.Any()) + { + queryParams.Add("definitionIds", string.Join(",", definitionIds)); + } + if (!string.IsNullOrEmpty(path)) + { + queryParams.Add("path", path); + } + if (builtAfter != null) + { + AddDateTimeToQueryParams(queryParams, "builtAfter", builtAfter.Value); + } + if (notBuiltAfter != null) + { + AddDateTimeToQueryParams(queryParams, "notBuiltAfter", notBuiltAfter.Value); + } + if (includeAllProperties != null) + { + queryParams.Add("includeAllProperties", includeAllProperties.Value.ToString()); + } + if (includeLatestBuilds != null) + { + queryParams.Add("includeLatestBuilds", includeLatestBuilds.Value.ToString()); + } + if (taskIdFilter != null) + { + queryParams.Add("taskIdFilter", taskIdFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + version: new ApiResourceVersion("5.0-preview.6"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionsAsync2( + string project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? 
notBuiltAfter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + false); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionsAsync2( + Guid project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + false); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [Obsolete("Use GetDefinitionsAsync2(string) instead.")] + public virtual Task> GetDefinitionsAsync2( + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? 
notBuiltAfter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + false); + + return SendAsync>( + httpMethod, + locationId, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetFullDefinitionsAsync( + string project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + true); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetFullDefinitionsAsync( + Guid project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + true); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
+ [Obsolete("Use GetFullDefinitionsAsync(string) instead.")] + public virtual Task> GetFullDefinitionsAsync( + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + true); + + return SendAsync>( + httpMethod, + locationId, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetFullDefinitionsAsync2( + string project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + true); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// Project ID + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetFullDefinitionsAsync2( + Guid project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? 
notBuiltAfter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + true); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Gets definitions, optionally filtered by name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [Obsolete("Use GetFullDefinitionsAsync2(string) instead.")] + public virtual Task> GetFullDefinitionsAsync2( + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + + List> queryParams = GetDefinitionsQueryParams( + name, + repositoryId, + repositoryType, + queryOrder, + top, + continuationToken, + minMetricsTimeInUtc, + definitionIds, + path, + builtAfter, + notBuiltAfter, + true); + + return SendAsync>( + httpMethod, + locationId, + version: s_DefinitionsApiVersion, + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList + ); + } + + /// + /// [Preview API] Updates a build. + /// + /// The build. + /// The ID of the build. + /// + /// + /// The cancellation token to cancel operation. + [Obsolete("Use UpdateBuildAsync(Build, bool, object, CancellationToken) instead.")] + public virtual Task UpdateBuildAsync( + Build build, + int buildId, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { buildId = buildId }; + HttpContent content = new ObjectContent(build, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.0, 4), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Updates a build. + /// + /// The build. + /// Project ID or project name + /// The ID of the build. + /// + /// + /// The cancellation token to cancel operation. 
+ [Obsolete("Use UpdateBuildAsync(Build, bool, object, CancellationToken) instead.")] + public virtual Task UpdateBuildAsync( + Build build, + string project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project, buildId = buildId }; + HttpContent content = new ObjectContent(build, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.0, 4), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Updates a build. + /// + /// The build. + /// Project ID + /// The ID of the build. + /// + /// + /// The cancellation token to cancel operation. + [Obsolete("Use UpdateBuildAsync(Build, bool, object, CancellationToken) instead.")] + public virtual Task UpdateBuildAsync( + Build build, + Guid project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project, buildId = buildId }; + HttpContent content = new ObjectContent(build, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.0, 4), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Gets a list of branches for the given source code repository. + /// + /// Project ID or project name + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// If specified, the vendor-specific identifier or the name of the repository to get branches. Can only be omitted for providers that do not support multiple repositories. + /// + /// The cancellation token to cancel operation. + public virtual Task> ListBranchesAsync( + string project, + string providerName, + Guid? serviceEndpointId = null, + string repository = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("e05d4403-9b81-4244-8763-20fde28d1976"); + object routeValues = new { project = project, providerName = providerName }; + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of branches for the given source code repository. + /// + /// Project ID + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// If specified, the vendor-specific identifier or the name of the repository to get branches. 
Can only be omitted for providers that do not support multiple repositories. + /// + /// The cancellation token to cancel operation. + public virtual Task> ListBranchesAsync( + Guid project, + string providerName, + Guid? serviceEndpointId = null, + string repository = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("e05d4403-9b81-4244-8763-20fde28d1976"); + object routeValues = new { project = project, providerName = providerName }; + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + private protected List> GetDefinitionsQueryParams( + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTimeInUtc = null, + IEnumerable definitionIds = null, + String path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeAllProperties = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? processType = null, + string yamlFilename = null) + { + List> queryParams = new List>(); + + // in case the server doesn't support 3.x yet + queryParams.Add("type", DefinitionType.Build.ToString()); + + if (!string.IsNullOrEmpty(name)) + { + queryParams.Add("name", name); + } + if (!string.IsNullOrEmpty(repositoryId)) + { + queryParams.Add("repositoryId", repositoryId); + } + if (!string.IsNullOrEmpty(repositoryType)) + { + queryParams.Add("repositoryType", repositoryType); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (minMetricsTimeInUtc != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTimeInUtc", minMetricsTimeInUtc.Value); + } + if (definitionIds != null && definitionIds.Any()) + { + queryParams.Add("definitionIds", string.Join(",", definitionIds)); + } + if (!string.IsNullOrEmpty(path)) + { + queryParams.Add("path", path); + } + if (builtAfter != null) + { + AddDateTimeToQueryParams(queryParams, "builtAfter", builtAfter.Value); + } + if (notBuiltAfter != null) + { + AddDateTimeToQueryParams(queryParams, "notBuiltAfter", notBuiltAfter.Value); + } + if (includeAllProperties ?? false) + { + queryParams.Add("includeAllProperties", includeAllProperties.Value.ToString()); + } + if (includeLatestBuilds ?? 
false)
+            {
+                queryParams.Add("includeLatestBuilds", includeLatestBuilds.Value.ToString());
+            }
+            if (taskIdFilter.HasValue)
+            {
+                queryParams.Add("taskIdFilter", taskIdFilter.Value.ToString());
+            }
+            if (processType.HasValue)
+            {
+                queryParams.Add("processType", processType.Value.ToString(CultureInfo.InvariantCulture));
+            }
+            if (!string.IsNullOrEmpty(yamlFilename))
+            {
+                queryParams.Add("yamlFilename", yamlFilename);
+            }
+
+            return queryParams;
+        }
+
+        private protected async Task<IPagedList<T>> GetPagedList<T>(HttpResponseMessage responseMessage, CancellationToken cancellationToken)
+        {
+            var continuationToken = GetContinuationToken(responseMessage);
+            var list = await ReadContentAsAsync<List<T>>(responseMessage, cancellationToken).ConfigureAwait(false);
+            return new PagedList<T>(list, continuationToken);
+        }
+
+        private protected Task<T> SendAsync<T>(
+            HttpMethod method,
+            Guid locationId,
+            Object routeValues = null,
+            ApiResourceVersion version = null,
+            HttpContent content = null,
+            IEnumerable<KeyValuePair<String, String>> queryParameters = null,
+            Object userState = null,
+            CancellationToken cancellationToken = default(CancellationToken),
+            Func<HttpResponseMessage, CancellationToken, Task<T>> processResponse = null)
+        {
+            return SendAsync(method, null, locationId, routeValues, version, content, queryParameters, userState, cancellationToken, processResponse);
+        }
+
+        private protected async Task<T> SendAsync<T>(
+            HttpMethod method,
+            IEnumerable<KeyValuePair<String, String>> additionalHeaders,
+            Guid locationId,
+            Object routeValues = null,
+            ApiResourceVersion version = null,
+            HttpContent content = null,
+            IEnumerable<KeyValuePair<String, String>> queryParameters = null,
+            Object userState = null,
+            CancellationToken cancellationToken = default(CancellationToken),
+            Func<HttpResponseMessage, CancellationToken, Task<T>> processResponse = null)
+        {
+            using (VssTraceActivity.GetOrCreate().EnterCorrelationScope())
+            using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync(method, additionalHeaders, locationId, routeValues, version, content, queryParameters, userState, cancellationToken).ConfigureAwait(false))
+            {
+                return await SendAsync(requestMessage, userState, cancellationToken, processResponse).ConfigureAwait(false);
+            }
+        }
+
+        private protected async Task<T> SendAsync<T>(
+            HttpRequestMessage message,
+            Object userState = null,
+            CancellationToken cancellationToken = default(CancellationToken),
+            Func<HttpResponseMessage, CancellationToken, Task<T>> processResponse = null)
+        {
+            if (processResponse == null)
+            {
+                processResponse = ReadContentAsAsync<T>;
+            }
+
+            //ConfigureAwait(false) enables the continuation to be run outside
+            //any captured SynchronizationContext (such as ASP.NET's) which keeps things
+            //from deadlocking...
+ using (HttpResponseMessage response = await this.SendAsync(message, userState, cancellationToken).ConfigureAwait(false)) + { + return await processResponse(response, cancellationToken).ConfigureAwait(false); + } + } + + private protected string GetContinuationToken(HttpResponseMessage responseMessage) + { + string continuationToken = null; + + IEnumerable headerValues = null; + if (responseMessage.Headers.TryGetValues("x-ms-continuationtoken", out headerValues)) + { + continuationToken = headerValues.FirstOrDefault(); + } + + return continuationToken; + } + + protected static readonly ApiResourceVersion s_BuildsApiVersion = new ApiResourceVersion("4.1-preview.3"); + protected static readonly ApiResourceVersion s_DefinitionsApiVersion = new ApiResourceVersion("4.1-preview.6"); + } +} diff --git a/src/Sdk/BuildWebApi/Api/BuildIssueKeys.cs b/src/Sdk/BuildWebApi/Api/BuildIssueKeys.cs new file mode 100644 index 00000000000..dc9181c70f2 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/BuildIssueKeys.cs @@ -0,0 +1,21 @@ +using System; + +namespace GitHub.Build.WebApi +{ + public static class BuildIssueKeys + { + public const String CodeCategory = "code"; + public const String SourcePath = "sourcePath"; + public const String LineNumber = "lineNumber"; + public const String Message = "message"; + } + + [Obsolete("Use BuildIssueKeys instead.")] + public static class WellKnownBuildKeys + { + public const String BuildIssueCodeCategory = BuildIssueKeys.CodeCategory; + public const String BuildIssueFileKey = BuildIssueKeys.SourcePath; + public const String BuildIssueLineNumberKey = BuildIssueKeys.LineNumber; + public const String BuildIssueMessageKey = BuildIssueKeys.Message; + } +} diff --git a/src/Sdk/BuildWebApi/Api/BuildOrchestrationType.cs b/src/Sdk/BuildWebApi/Api/BuildOrchestrationType.cs new file mode 100644 index 00000000000..ee194908b49 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/BuildOrchestrationType.cs @@ -0,0 +1,12 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [GenerateAllConstants] + public static class BuildOrchestrationType + { + public const Int32 Build = 1; + public const Int32 Cleanup = 2; + } +} diff --git a/src/Sdk/BuildWebApi/Api/BuildPermissions.cs b/src/Sdk/BuildWebApi/Api/BuildPermissions.cs new file mode 100644 index 00000000000..1f62c6d6032 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/BuildPermissions.cs @@ -0,0 +1,40 @@ +using System; + +namespace GitHub.Build.WebApi +{ + public static class BuildPermissions + { + public static readonly Int32 ViewBuilds = 1; + public static readonly Int32 EditBuildQuality = 2; + public static readonly Int32 RetainIndefinitely = 4; + public static readonly Int32 DeleteBuilds = 8; + public static readonly Int32 ManageBuildQualities = 16; + public static readonly Int32 DestroyBuilds = 32; + public static readonly Int32 UpdateBuildInformation = 64; + public static readonly Int32 QueueBuilds = 128; + public static readonly Int32 ManageBuildQueue = 256; + public static readonly Int32 StopBuilds = 512; + public static readonly Int32 ViewBuildDefinition = 1024; + public static readonly Int32 EditBuildDefinition = 2048; + public static readonly Int32 DeleteBuildDefinition = 4096; + public static readonly Int32 OverrideBuildCheckInValidation = 8192; + public static readonly Int32 AdministerBuildPermissions = 16384; + + public static readonly Int32 AllPermissions = + ViewBuilds | + EditBuildQuality | + RetainIndefinitely | + DeleteBuilds | + ManageBuildQualities | + DestroyBuilds | + UpdateBuildInformation | + 
QueueBuilds | + ManageBuildQueue | + StopBuilds | + ViewBuildDefinition | + EditBuildDefinition | + DeleteBuildDefinition | + OverrideBuildCheckInValidation | + AdministerBuildPermissions; + } +} diff --git a/src/Sdk/BuildWebApi/Api/BuildResourceIds.cs b/src/Sdk/BuildWebApi/Api/BuildResourceIds.cs new file mode 100644 index 00000000000..8198272d295 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/BuildResourceIds.cs @@ -0,0 +1,165 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [GenerateAllConstants(alternateName: "Build2ResourceIds")] + public static class BuildResourceIds + { + // keep these sorted + + public const String AreaId = "5D6898BB-45EC-463F-95F9-54D49C71752E"; + public const String AreaName = "build"; + + public static readonly Guid Artifacts = new Guid("{1DB06C96-014E-44E1-AC91-90B2D4B3E984}"); + public const String ArtifactsResource = "artifacts"; + + public const String AttachmentLocation = "AF5122D3-3438-485E-A25A-2DBBFDE84EE6"; + public static readonly Guid Attachment = new Guid(AttachmentLocation); + + public const String AttachmentsLocation = "F2192269-89FA-4F94-BAF6-8FB128C55159"; + public static readonly Guid Attachments = new Guid(AttachmentsLocation); + public const String AttachmentsResource = "attachments"; + + public static readonly Guid BuildBadge = new Guid("21b3b9ce-fad5-4567-9ad0-80679794e003"); + public const String BuildBadgeResource = "buildbadge"; + + public const String BuildChangesLocationId = "54572C7B-BBD3-45D4-80DC-28BE08941620"; + public static readonly Guid BuildChangesBetweenBuilds = new Guid("{F10F0EA5-18A1-43EC-A8FB-2042C7BE9B43}"); + public static readonly Guid BuildChanges = new Guid(BuildChangesLocationId); + public const String BuildChangesResource = "changes"; + + public static readonly Guid BuildDefinitionBadge = new Guid("de6a4df8-22cd-44ee-af2d-39f6aa7a4261"); + public const String BuildDefinitionBadgeResource = "badge"; + + public static readonly Guid BuildDeployments = new Guid("{F275BE9A-556A-4EE9-B72F-F9C8370CCAEE}"); + public const String BuildDeploymentsResource = "deployments"; + + public static readonly Guid BuildLogs = new Guid("{35A80DAF-7F30-45FC-86E8-6B813D9C90DF}"); + public const String BuildLogsResource = "logs"; + + public const String BuildPropertiesLocationString = "0A6312E9-0627-49B7-8083-7D74A64849C9"; + public static readonly Guid BuildProperties = new Guid(BuildPropertiesLocationString); + + public static readonly Guid BuildReport = new Guid("{45BCAA88-67E1-4042-A035-56D3B4A7D44C}"); + public const String BuildReportResource = "report"; + + public static readonly Guid Builds = new Guid("{0CD358E1-9217-4D94-8269-1C1EE6F93DCF}"); + public const String BuildsResource = "builds"; + + public const String BuildTagsLocationIdString = "6E6114B2-8161-44C8-8F6C-C5505782427F"; + public static readonly Guid BuildTags = new Guid(BuildTagsLocationIdString); + public const String BuildTagsResource = "tags"; + + public const String BuildWorkItemsLocationId = "5A21F5D2-5642-47E4-A0BD-1356E6731BEE"; + public static readonly Guid BuildWorkItemsBetweenBuilds = new Guid("{52BA8915-5518-42E3-A4BB-B0182D159E2D}"); + public static readonly Guid BuildWorkItems = new Guid(BuildWorkItemsLocationId); + public const String BuildWorkItemsResource = "workitems"; + + public const String ControllersLocationString = "{FCAC1932-2EE1-437F-9B6F-7F696BE858F6}"; + public static readonly Guid Controllers = new Guid(ControllersLocationString); + public const String ControllersResource = "Controllers"; + + public const 
String DefinitionMetricsLocationString = "D973B939-0CE0-4FEC-91D8-DA3940FA1827"; + public static readonly Guid DefinitionMetrics = new Guid(DefinitionMetricsLocationString); + public const String DefinitionMetricsResource = "metrics"; + + public const String DefinitionPropertiesLocationString = "D9826AD7-2A68-46A9-A6E9-677698777895"; + public static readonly Guid DefinitionProperties = new Guid(DefinitionPropertiesLocationString); + + public static readonly Guid DefinitionResources = new Guid("ea623316-1967-45eb-89ab-e9e6110cf2d6"); + public const String DefinitionResourcesResource = "resources"; + + public static readonly Guid DefinitionRevisions = new Guid("{7C116775-52E5-453E-8C5D-914D9762D8C4}"); + public const String DefinitionRevisionsResource = "revisions"; + + public static readonly Guid Definitions = new Guid("{DBEAF647-6167-421A-BDA9-C9327B25E2E6}"); + public const String DefinitionsResource = "definitions"; + + public const String DefinitionTagsLocationIdString = "CB894432-134A-4D31-A839-83BECEAACE4B"; + public static readonly Guid DefinitionTags = new Guid(DefinitionTagsLocationIdString); + + public static readonly Guid Folders = new Guid("{A906531B-D2DA-4F55-BDA7-F3E676CC50D9}"); + public const String FoldersResource = "folders"; + + // information nodes for XAML builds + public static readonly Guid InformationNodes = new Guid("9F094D42-B41C-4920-95AA-597581A79821"); + + public static readonly Guid InputValuesQuery = new Guid("{2182A7F0-B363-4B2D-B89E-ED0A0B721E95}"); + public const String InputValuesQueryResource = "InputValuesQuery"; + + public static readonly Guid LatestBuildLocationId = new Guid("54481611-01F4-47F3-998F-160DA0F0C229"); + public const String LatestBuildResource = "latest"; + + public static readonly Guid Metrics = new Guid("104AD424-B758-4699-97B7-7E7DA427F9C2"); + public const String MetricsResource = "Metrics"; + + public static readonly Guid Options = new Guid("{591CB5A4-2D46-4F3A-A697-5CD42B6BD332}"); + public const String OptionsResource = "options"; + + public const String ProjectMetricsLocationString = "7433FAE7-A6BC-41DC-A6E2-EEF9005CE41A"; + public static readonly Guid ProjectMetrics = new Guid(ProjectMetricsLocationString); + + public static readonly Guid ProjectAuthorizedResources = new Guid("398c85bc-81aa-4822-947c-a194a05f0fef"); + public const String ProjectAuthorizedResourcesResource = "authorizedresources"; + + public const String PropertiesResource = "properties"; + + public static readonly Guid Queues = new Guid("{09F2A4B8-08C9-4991-85C3-D698937568BE}"); + public const String QueuesResource = "queues"; + + public static readonly Guid Settings = new Guid("{AA8C1C9C-EF8B-474A-B8C4-785C7B191D0D}"); + public const String SettingsResource = "settings"; + + public const String SourceProviderBranchesResource = "branches"; + public const String SourceProviderBranchesLocationIdString = "E05D4403-9B81-4244-8763-20FDE28D1976"; + public static readonly Guid SourceProviderBranchesLocationId = new Guid(SourceProviderBranchesLocationIdString); + + public const String SourceProviderFileContentsResource = "fileContents"; + public const String SourceProviderFileContentsLocationIdString = "29D12225-B1D9-425F-B668-6C594A981313"; + public static readonly Guid SourceProviderFileContentsLocationId = new Guid(SourceProviderFileContentsLocationIdString); + + public const String SourceProviderPathContentsResource = "pathContents"; + public const String SourceProviderPathContentsLocationIdString = "7944D6FB-DF01-4709-920A-7A189AA34037"; + public static readonly Guid 
SourceProviderPathContentsLocationId = new Guid(SourceProviderPathContentsLocationIdString); + + public const String SourceProviderPullRequestsResource = "pullRequests"; + public const String SourceProviderPullRequestsLocationIdString = "D8763EC7-9FF0-4FB4-B2B2-9D757906FF14"; + public static readonly Guid SourceProviderPullRequestsLocationId = new Guid(SourceProviderPullRequestsLocationIdString); + + public const String SourceProviderRepositoriesResource = "repositories"; + public const String SourceProviderRepositoriesLocationIdString = "D44D1680-F978-4834-9B93-8C6E132329C9"; + public static readonly Guid SourceProviderRepositoriesLocationId = new Guid(SourceProviderRepositoriesLocationIdString); + + public const String SourceProviderRestoreWebhooksLocationIdString = "793BCEB8-9736-4030-BD2F-FB3CE6D6B478"; + public static readonly Guid SourceProviderRestoreWebhooksLocationId = new Guid(SourceProviderRestoreWebhooksLocationIdString); + + public const String SourceProvidersResource = "sourceProviders"; + public const String SourceProvidersLocationIdString = "3CE81729-954F-423D-A581-9FEA01D25186"; + public static readonly Guid SourceProviders = new Guid(SourceProvidersLocationIdString); + + public const String SourceProviderWebhooksResource = "webhooks"; + public const String SourceProviderWebhooksLocationIdString = "8F20FF82-9498-4812-9F6E-9C01BDC50E99"; + public static readonly Guid SourceProviderWebhooksLocationId = new Guid(SourceProviderWebhooksLocationIdString); + + public const String SourcesLocationId = "56EFDCDC-CF90-4028-9D2F-D41000682202"; + public static readonly Guid Sources = new Guid(SourcesLocationId); + public const String SourcesResource = "sources"; + + public const String StatusBadgeLocationIdString = "07ACFDCE-4757-4439-B422-DDD13A2FCC10"; + public static readonly Guid StatusBadgeLocationId = new Guid(StatusBadgeLocationIdString); + public const String StatusBadgeResource = "status"; + + public const String TagsLocationIdString = "D84AC5C6-EDC7-43D5-ADC9-1B34BE5DEA09"; + public static readonly Guid Tags = new Guid(TagsLocationIdString); + + public static readonly Guid Templates = new Guid("{E884571E-7F92-4D6A-9274-3F5649900835}"); + public const String TemplatesResource = "templates"; + + public static readonly Guid Timelines = new Guid("8baac422-4c6e-4de5-8532-db96d92acffa"); + public const String TimelinesResource = "Timeline"; + + public static readonly Guid Usage = new Guid("3813d06c-9e36-4ea1-aac3-61a485d60e3d"); + public const String UsageResource = "ResourceUsage"; + } +} diff --git a/src/Sdk/BuildWebApi/Api/BuildTemplateCategories.cs b/src/Sdk/BuildWebApi/Api/BuildTemplateCategories.cs new file mode 100644 index 00000000000..b84871637d6 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/BuildTemplateCategories.cs @@ -0,0 +1,29 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [GenerateAllConstants] + public static class BuildTemplateCategories + { + public static readonly String All = "All"; + public static readonly String Build = "Build"; + public static readonly String Utility = "Utility"; + public static readonly String Test = "Test"; + public static readonly String Package = "Package"; + public static readonly String Deploy = "Deploy"; + public static readonly String Tool = "Tool"; + public static readonly String Custom = "Custom"; + + public static readonly String[] AllCategories = new String[] { + All, + Build, + Utility, + Test, + Package, + Deploy, + Tool, + Custom + }; + } +} diff --git 
a/src/Sdk/BuildWebApi/Api/BuildVariables.cs b/src/Sdk/BuildWebApi/Api/BuildVariables.cs new file mode 100644 index 00000000000..0ec79c1c65f --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/BuildVariables.cs @@ -0,0 +1,87 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + /* IMPORTANT NOTE: if you're adding a new build variable that's designed to hold PII data + (e.g. names, addresses, phone numbers, IP addresses, emails), please add a corresponding reference to `PiiVariables` at + https://github.com/Microsoft/azure-pipelines-agent/blob/master/src/Agent.Worker/Variables.cs + This is so the agent can scrub the variable value from the diagnostics log. */ + [GenerateAllConstants] + public static class BuildVariables + { + public const String CollectionId = "system.collectionId"; + public const String DefinitionId = "system.definitionId"; + public const String HostType = "system.hosttype"; + public const String IsFork = "system.pullRequest.isFork"; + public const String ForkSecretsRemoved= "system.pullRequest.forkSecretsRemoved"; + public const String PullRequestId = "system.pullRequest.pullRequestId"; + public const String PullRequestNumber = "system.pullRequest.pullRequestNumber"; + public const String PullRequestIterationId = "system.pullRequest.pullRequestIteration"; + public const String PullRequestSourceBranch = "system.pullRequest.sourceBranch"; + public const String PullRequestTargetBranch = "system.pullRequest.targetBranch"; + public const String PullRequestSourceRepositoryUri = "system.pullRequest.sourceRepositoryUri"; + public const String PullRequestSourceCommitId = "system.pullRequest.sourceCommitId"; + public const String PullRequestMergedAt = "system.pullRequest.mergedAt"; + public const String System = "system"; + public const String TeamProject = "system.teamProject"; + public const String TeamProjectId = "system.teamProjectId"; + + public const String BuildId = "build.buildId"; + public const String BuildNumber = "build.buildNumber"; + public const String BuildUri = "build.buildUri"; + public const String ContainerId = "build.containerId"; + public const String DefinitionName = "build.definitionName"; + public const String DefinitionVersion = "build.definitionVersion"; + public const String JobAuthorizeAs = "Job.AuthorizeAs"; + public const String JobAuthorizeAsId = "Job.AuthorizeAsId"; + public const String QueuedBy = "build.queuedBy"; + public const String QueuedById = "build.queuedById"; + public const String Reason = "build.reason"; + public const String RepoUri = "build.repository.uri"; + public const String RequestedFor = "build.requestedFor"; + public const String RequestedForEmail = "build.requestedForEmail"; + public const String RequestedForId = "build.requestedForId"; + public const String SourceBranch = "build.sourceBranch"; + public const String SourceBranchName = "build.sourceBranchName"; + public const String SourceTfvcShelveset = "build.sourceTfvcShelveset"; + public const String SourceVersion = "build.sourceVersion"; + public const String SourceVersionAuthor = "build.sourceVersionAuthor"; + public const String SourceVersionMessage = "build.sourceVersionMessage"; + public const String SyncSources = "build.syncSources"; + } + + [Obsolete("Use BuildVariables instead.")] + public static class WellKnownBuildVariables + { + public const String System = BuildVariables.System; + public const String CollectionId = BuildVariables.CollectionId; + public const String TeamProject = BuildVariables.TeamProject; + public const String 
TeamProjectId = BuildVariables.TeamProjectId; + public const String DefinitionId = BuildVariables.DefinitionId; + public const String HostType = BuildVariables.HostType; + public const String IsFork = BuildVariables.IsFork; + public const String DefinitionName = BuildVariables.DefinitionName; + public const String DefinitionVersion = BuildVariables.DefinitionVersion; + public const String QueuedBy = BuildVariables.QueuedBy; + public const String QueuedById = BuildVariables.QueuedById; + public const String Reason = BuildVariables.Reason; + public const String RequestedFor = BuildVariables.RequestedFor; + public const String RequestedForId = BuildVariables.RequestedForId; + public const String RequestedForEmail = BuildVariables.RequestedForEmail; + public const String SourceBranch = BuildVariables.SourceBranch; + public const String SourceBranchName = BuildVariables.SourceBranchName; + public const String SourceVersion = BuildVariables.SourceVersion; + public const String SourceVersionAuthor = BuildVariables.SourceVersionAuthor; + public const String SourceVersionMessage = BuildVariables.SourceVersionMessage; + public const String SourceTfvcShelveset = BuildVariables.SourceTfvcShelveset; + public const String BuildId = BuildVariables.BuildId; + public const String BuildUri = BuildVariables.BuildUri; + public const String BuildNumber = BuildVariables.BuildNumber; + public const String ContainerId = BuildVariables.ContainerId; + public const String SyncSources = BuildVariables.SyncSources; + public const String JobAuthorizeAs = BuildVariables.JobAuthorizeAs; + public const String JobAuthorizeAsId = BuildVariables.JobAuthorizeAsId; + public const String RepoUri = BuildVariables.RepoUri; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/AgentPoolQueue.cs b/src/Sdk/BuildWebApi/Api/Contracts/AgentPoolQueue.cs new file mode 100644 index 00000000000..0385ffaa72b --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/AgentPoolQueue.cs @@ -0,0 +1,134 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a queue for running builds. + /// + [DataContract] +#pragma warning disable 618 + public class AgentPoolQueue : ShallowReference, ISecuredObject +#pragma warning restore 618 + { + public AgentPoolQueue() + { + } + + internal AgentPoolQueue( + ISecuredObject securedObject) + { + this.m_securedObject = securedObject; + } + + /// + /// The ID of the queue. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public new Int32 Id + { + get + { + return base.Id; + } + set + { + base.Id = value; + } + } + + /// + /// The name of the queue. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public new String Name + { + get + { + return base.Name; + } + set + { + base.Name = value; + } + } + + /// + /// The full http link to the resource. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public new String Url + { + get + { + return base.Url; + } + set + { + base.Url = value; + } + } + + /// + /// The pool used by this queue. + /// + [DataMember] + public TaskAgentPoolReference Pool + { + get; + set; + } + + /// + /// The links to other objects related to this object. 
+ /// + public ReferenceLinks Links + { + get + { + if (m_links == null) + { + m_links = new ReferenceLinks(); + } + return m_links; + } + } + + [DataMember(Name = "_links", EmitDefaultValue = false)] + private ReferenceLinks m_links; + + #region ISecuredObject implementation + + [IgnoreDataMember] + Guid ISecuredObject.NamespaceId + { + get + { + ArgumentUtility.CheckForNull(m_securedObject, nameof(m_securedObject)); + return m_securedObject.NamespaceId; + } + } + + [IgnoreDataMember] + Int32 ISecuredObject.RequiredPermissions + { + get + { + ArgumentUtility.CheckForNull(m_securedObject, nameof(m_securedObject)); + return m_securedObject.RequiredPermissions; + } + } + + String ISecuredObject.GetToken() + { + ArgumentUtility.CheckForNull(m_securedObject, nameof(m_securedObject)); + return m_securedObject.GetToken(); + } + + private ISecuredObject m_securedObject; + + #endregion + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/AgentPoolQueueTarget.cs b/src/Sdk/BuildWebApi/Api/Contracts/AgentPoolQueueTarget.cs new file mode 100644 index 00000000000..01776936f4e --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/AgentPoolQueueTarget.cs @@ -0,0 +1,102 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Describes how a phase should run against an agent queue. + /// + [DataContract] + public class AgentPoolQueueTarget : PhaseTarget + { + public AgentPoolQueueTarget() + : this(null) + { + } + + internal AgentPoolQueueTarget( + ISecuredObject securedObject) + : base(PhaseTargetType.Agent, securedObject) + { + } + + /// + /// The queue. + /// + [DataMember(EmitDefaultValue = false)] + public AgentPoolQueue Queue + { + get; + set; + } + + /// + /// Agent specification of the target. + /// + [DataMember(EmitDefaultValue = false)] + public AgentSpecification AgentSpecification + { + get; + set; + } + + /// + /// The list of demands required for the queue. + /// + public List Demands + { + get + { + if (m_demands == null) + { + m_demands = new List(); + } + + return m_demands; + } + set + { + m_demands = new List(value); + } + } + + /// + /// The execution options. + /// + [DataMember] + public AgentTargetExecutionOptions ExecutionOptions + { + get; + set; + } + + /// + /// Enables scripts and other processes launched while executing phase to access the OAuth token + /// + [DataMember] + public Boolean AllowScriptsAuthAccessOption + { + get; + set; + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedDemands, ref m_demands, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_demands, ref m_serializedDemands); + } + + [DataMember(Name = "Demands", EmitDefaultValue = false)] + private List m_serializedDemands; + + private List m_demands; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/AgentSpecification.cs b/src/Sdk/BuildWebApi/Api/Contracts/AgentSpecification.cs new file mode 100644 index 00000000000..98becf48779 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/AgentSpecification.cs @@ -0,0 +1,28 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Specification of the agent defined by the pool provider. 
+ /// + [DataContract] + public class AgentSpecification: BaseSecuredObject + { + public AgentSpecification() + { + } + + public AgentSpecification(ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// Agent specification unique identifier. + /// + [DataMember(EmitDefaultValue = false)] + public String Identifier { get; set; } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/AgentTargetExecutionOptions.cs b/src/Sdk/BuildWebApi/Api/Contracts/AgentTargetExecutionOptions.cs new file mode 100644 index 00000000000..0c271105b90 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/AgentTargetExecutionOptions.cs @@ -0,0 +1,51 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.Build.WebApi +{ + /// + /// Additional options for running phases against an agent queue. + /// + [DataContract] + [KnownType(typeof(MultipleAgentExecutionOptions))] + [KnownType(typeof(VariableMultipliersAgentExecutionOptions))] + [JsonConverter(typeof(AgentTargetExecutionOptionsJsonConverter))] + public class AgentTargetExecutionOptions : BaseSecuredObject + { + public AgentTargetExecutionOptions() + : this(AgentTargetExecutionType.Normal) + { + } + + protected AgentTargetExecutionOptions(Int32 type) + : this(type, null) + { + } + + internal AgentTargetExecutionOptions( + ISecuredObject securedObject) + : this(AgentTargetExecutionType.Normal, securedObject) + { + } + + internal AgentTargetExecutionOptions( + Int32 type, + ISecuredObject securedObject) + : base(securedObject) + { + this.Type = type; + } + + /// + /// Indicates the type of execution options. + /// + [DataMember(EmitDefaultValue = true)] + public Int32 Type + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/AgentTargetExecutionOptionsJsonConverter.cs b/src/Sdk/BuildWebApi/Api/Contracts/AgentTargetExecutionOptionsJsonConverter.cs new file mode 100644 index 00000000000..7d416d90a6e --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/AgentTargetExecutionOptionsJsonConverter.cs @@ -0,0 +1,42 @@ +using System; + +namespace GitHub.Build.WebApi +{ + internal sealed class AgentTargetExecutionOptionsJsonConverter : TypePropertyJsonConverter + { + protected override AgentTargetExecutionOptions GetInstance(Type objectType) + { + if (objectType == typeof(AgentTargetExecutionType)) + { + return new AgentTargetExecutionOptions(); + } + else if (objectType == typeof(VariableMultipliersAgentExecutionOptions)) + { + return new VariableMultipliersAgentExecutionOptions(); + } + else if (objectType == typeof(MultipleAgentExecutionOptions)) + { + return new MultipleAgentExecutionOptions(); + } + else + { + return base.GetInstance(objectType); + } + } + + protected override AgentTargetExecutionOptions GetInstance(Int32 targetType) + { + switch (targetType) + { + case AgentTargetExecutionType.Normal: + return new AgentTargetExecutionOptions(); + case AgentTargetExecutionType.VariableMultipliers: + return new VariableMultipliersAgentExecutionOptions(); + case AgentTargetExecutionType.MultipleAgents: + return new MultipleAgentExecutionOptions(); + default: + return null; + } + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/ArtifactResource.cs b/src/Sdk/BuildWebApi/Api/Contracts/ArtifactResource.cs new file mode 100644 index 00000000000..550ace133bb --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/ArtifactResource.cs @@ -0,0 +1,95 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using 
GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + [DataContract] + public class ArtifactResource : BaseSecuredObject + { + public ArtifactResource() + { + } + + public ArtifactResource( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The type of the resource: File container, version control folder, UNC path, etc. + /// + [DataMember(EmitDefaultValue = false)] + public String Type + { + get; + set; + } + + /// + /// Type-specific data about the artifact. + /// + /// + /// For example, "#/10002/5/drop", "$/drops/5", "\\myshare\myfolder\mydrops\5" + /// + [DataMember(EmitDefaultValue = false)] + public String Data + { + get; + set; + } + + /// + /// Type-specific properties of the artifact. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Dictionary Properties + { + get; + set; + } + + /// + /// The full http link to the resource. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Url + { + get; + set; + } + + /// + /// A link to download the resource. + /// + /// + /// This might include things like query parameters to download as a zip file. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String DownloadUrl + { + get; + set; + } + + /// + /// The links to other objects related to this object. + /// + public ReferenceLinks Links + { + get + { + if (m_links == null) + { + m_links = new ReferenceLinks(); + } + return m_links; + } + } + + [DataMember(Name = "_links", EmitDefaultValue = false)] + private ReferenceLinks m_links; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Attachment.cs b/src/Sdk/BuildWebApi/Api/Contracts/Attachment.cs new file mode 100644 index 00000000000..d79a726fc67 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Attachment.cs @@ -0,0 +1,51 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents an attachment to a build. + /// + [DataContract] + public class Attachment : BaseSecuredObject + { + public Attachment() + { + } + + internal Attachment( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The name of the attachment. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// The links to other objects related to this object. + /// + public ReferenceLinks Links + { + get + { + if (m_links == null) + { + m_links = new ReferenceLinks(); + } + return m_links; + } + } + + [DataMember(Name = "_links", EmitDefaultValue = false)] + private ReferenceLinks m_links; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Build.cs b/src/Sdk/BuildWebApi/Api/Contracts/Build.cs new file mode 100644 index 00000000000..4b21bd46c20 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Build.cs @@ -0,0 +1,605 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.ComponentModel.DataAnnotations; +using System.Runtime.Serialization; +using GitHub.Core.WebApi; +//using GitHub.Core.WebApi; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Data representation of a build. + /// + [DataContract] + public class Build : ISecuredObject + { + public Build() + { + Reason = BuildReason.Manual; + Priority = QueuePriority.Normal; + } + + #region BuildReference members + // these are also present in BuildReference. ideally this class would inherit from that. 
+ // however, moving them to a base class changes the order in which they are serialized to xml + // which breaks compat with subscribers (like RM) who may not be on the same milestone + // TODO: remove these when we figure out how to version service bus events + + /// + /// The ID of the build. + /// + [DataMember(EmitDefaultValue = false)] + [Key] + public Int32 Id + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The build number/name of the build. + /// + [DataMember(EmitDefaultValue = false)] + public String BuildNumber + { + get; + set; + } + + /// + /// The status of the build. + /// + [DataMember(EmitDefaultValue = false)] + public BuildStatus? Status + { + get; + set; + } + + /// + /// The build result. + /// + [DataMember(EmitDefaultValue = false)] + public BuildResult? Result + { + get; + set; + } + + /// + /// The time that the build was queued. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? QueueTime + { + get; + set; + } + + /// + /// The time that the build was started. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? StartTime + { + get; + set; + } + + /// + /// The time that the build was completed. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? FinishTime + { + get; + set; + } + + /// + /// The links to other objects related to this object. + /// + public ReferenceLinks Links + { + get + { + if (m_links == null) + { + m_links = new ReferenceLinks(); + } + return m_links; + } + } + + [DataMember(Name = "_links", EmitDefaultValue = false)] + private ReferenceLinks m_links; + + #endregion + + /// + /// The REST URL of the build. + /// + [DataMember(EmitDefaultValue = false)] + public String Url + { + get; + set; + } + + /// + /// The definition associated with the build. + /// + [DataMember(EmitDefaultValue = false)] + public DefinitionReference Definition + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The build number revision. + /// + [DataMember(EmitDefaultValue = false)] + public Int32? BuildNumberRevision + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The team project. + /// + [DataMember(EmitDefaultValue = false)] + public TeamProjectReference Project + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The URI of the build. + /// + [DataMember(EmitDefaultValue = false)] + public Uri Uri + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The source branch. + /// + [DataMember(EmitDefaultValue = false)] + public String SourceBranch + { + get; + set; + } + + /// + /// The source version. + /// + [DataMember(EmitDefaultValue = false)] + public String SourceVersion + { + get; + set; + } + + /// + /// The queue. This is only set if the definition type is Build. + /// + [DataMember(EmitDefaultValue = false)] + public AgentPoolQueue Queue + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The agent specification for the build. + /// + [DataMember(EmitDefaultValue = false)] + public AgentSpecification AgentSpecification + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The current position of the build in the queue. + /// + [DataMember(EmitDefaultValue = false)] + public Int32? QueuePosition + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The build's priority. 
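+ /// Defaults to QueuePriority.Normal (set in the constructor).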
+ /// + [DataMember(EmitDefaultValue = false)] + public QueuePriority Priority + { + get; + set; + } + + /// + /// The reason that the build was created. + /// + [DataMember(EmitDefaultValue = false)] + public BuildReason Reason + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The identity on whose behalf the build was queued. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef RequestedFor + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The identity that queued the build. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef RequestedBy + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The date the build was last changed. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime LastChangedDate + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The identity representing the process or person that last changed the build. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef LastChangedBy + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The date the build was deleted. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? DeletedDate + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The identity of the process or person that deleted the build. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef DeletedBy + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The description of how the build was deleted. + /// + [DataMember(EmitDefaultValue = false)] + public String DeletedReason + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The parameters for the build. + /// + [DataMember(EmitDefaultValue = false)] + public String Parameters + { + get; + set; + } + + /// + /// A list of demands that represents the agent capabilities required by this build. + /// + [DataMember(EmitDefaultValue = false)] + public List Demands + { + get; + set; + } + + /// + /// The orchestration plan for the build. + /// + [DataMember(EmitDefaultValue = false)] + public TaskOrchestrationPlanReference OrchestrationPlan + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The list of Orchestration plans associated with the build. + /// + /// + /// The build may have plans in addition to the main plan. For example, the cleanup job may have an orchestration plan associated with it. + /// + public List Plans + { + get + { + if (m_plans == null) + { + m_plans = new List(); + } + + return m_plans; + } + set + { + m_plans = value; + } + } + + /// + /// Information about the build logs. + /// + [DataMember(EmitDefaultValue = false)] + public BuildLogReference Logs + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The repository. + /// + [DataMember(EmitDefaultValue = false)] + public BuildRepository Repository + { + get; + set; + } + + /// + /// Additional options for queueing the build. + /// + [DataMember(EmitDefaultValue = false)] + public QueueOptions QueueOptions + { + get; + set; + } + + /// + /// Indicates whether the build has been deleted. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean Deleted + { + get; + set; + } + + /// + /// A collection of properties which may be used to extend the storage fields available + /// for a given build. 
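+ /// Created lazily; the backing m_properties field carries the serialized value.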
+ /// + public PropertiesCollection Properties + { + get + { + if (m_properties == null) + { + m_properties = new PropertiesCollection(); + } + return m_properties; + } + internal set + { + m_properties = value; + } + } + + /// + /// A collection of tags associated with the build. + /// + public List Tags + { + get + { + if (m_tags == null) + { + m_tags = new List(); + } + + return m_tags; + } + internal set + { + m_tags = value; + } + } + + /// + /// The list of validation errors and warnings. + /// + public List ValidationResults + { + get + { + if (m_validationResults == null) + { + m_validationResults = new List(); + } + return m_validationResults; + } + } + + /// + /// Indicates whether the build should be skipped by retention policies. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean? KeepForever + { + get; + set; + } + + /// + /// The quality of the xaml build (good, bad, etc.) + /// + /// + /// This is only used for XAML builds. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Quality + { + get; + set; + } + + /// + /// Indicates whether the build is retained by a release. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean? RetainedByRelease + { + get; + set; + } + + /// + /// The build that triggered this build via a Build completion trigger. + /// + [DataMember] + public Build TriggeredByBuild { get; set; } + + /// + /// Trigger-specific information about the build. + /// + public IDictionary TriggerInfo + { + get + { + if (m_triggerInfo == null) + { + m_triggerInfo = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_triggerInfo; + } + internal set + { + if (value != null) + { + m_triggerInfo = new Dictionary(value, StringComparer.OrdinalIgnoreCase); + } + } + } + + [DataMember(EmitDefaultValue = false, Name = "Properties")] + private PropertiesCollection m_properties; + + [DataMember(EmitDefaultValue = false, Name = "Tags")] + private List m_tags; + + [DataMember(EmitDefaultValue = false, Name = "ValidationResults")] + private List m_validationResults; + + /// + /// Orchestration plans associated with the build (build, cleanup) + /// + [DataMember(EmitDefaultValue = false, Name = "Plans")] + private List m_plans; + + /// + /// Sourceprovider-specific information about what triggered the build + /// + /// Added in 3.2-preview.3 + [DataMember(EmitDefaultValue = false, Name = "TriggerInfo")] + private Dictionary m_triggerInfo; + + #region ISecuredObject implementation + + Guid ISecuredObject.NamespaceId => Security.BuildNamespaceId; + + Int32 ISecuredObject.RequiredPermissions => BuildPermissions.ViewBuilds; + + String ISecuredObject.GetToken() + { + if (!String.IsNullOrEmpty(m_nestingToken)) + { + return m_nestingToken; + } + + return ((ISecuredObject)this.Definition)?.GetToken(); + } + + internal void SetNestingSecurityToken(String tokenValue) + { + // Spike: investigate imposing restrictions on the amount of information being returned + // when a nesting security token is being used. 
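+ // A non-empty nesting token takes precedence over the token derived from Definition in ISecuredObject.GetToken().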
+ m_nestingToken = tokenValue; + } + + private String m_nestingToken = String.Empty; + #endregion + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildArtifact.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildArtifact.cs new file mode 100644 index 00000000000..62e23a4caa4 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildArtifact.cs @@ -0,0 +1,63 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents an artifact produced by a build. + /// + [DataContract] + public class BuildArtifact : BaseSecuredObject + { + public BuildArtifact() + { + } + + internal BuildArtifact( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The artifact ID. + /// + [DataMember] + public Int32 Id + { + get; + set; + } + + /// + /// The name of the artifact. + /// + [DataMember] + public String Name + { + get; + set; + } + + /// + /// The artifact source, which will be the ID of the job that produced this artifact. + /// + [DataMember] + public String Source + { + get; + set; + } + + /// + /// The actual resource. + /// + [DataMember] + public ArtifactResource Resource + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildBadge.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildBadge.cs new file mode 100644 index 00000000000..411db878606 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildBadge.cs @@ -0,0 +1,36 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a build badge. + /// + [DataContract] + public class BuildBadge + { + public BuildBadge() + { + } + + /// + /// The ID of the build represented by this badge. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 BuildId + { + get; + set; + } + + /// + /// A link to the SVG resource. + /// + [DataMember(EmitDefaultValue = false)] + public String ImageUrl + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinition.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinition.cs new file mode 100644 index 00000000000..6afad54cb04 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinition.cs @@ -0,0 +1,365 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Common.Contracts; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a build definition. + /// + [DataContract] + public class BuildDefinition : BuildDefinitionReference + { + public BuildDefinition() + { + this.JobAuthorizationScope = BuildAuthorizationScope.ProjectCollection; + } + + /// + /// The build number format. + /// + [DataMember(EmitDefaultValue = false)] + public String BuildNumberFormat + { + get; + set; + } + + /// + /// A save-time comment for the definition. + /// + [DataMember(EmitDefaultValue = false)] + public String Comment + { + get; + set; + } + + /// + /// The description. + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + /// + /// The drop location for the definition. + /// + [DataMember(EmitDefaultValue = false)] + public String DropLocation + { + get; + set; + } + + /// + /// The job authorization scope for builds queued against this definition. + /// + [DataMember] + public BuildAuthorizationScope JobAuthorizationScope + { + get; + set; + } + + /// + /// The job execution timeout (in minutes) for builds queued against this definition. 
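+ /// Serialized only when non-zero.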
+ /// + [DataMember(EmitDefaultValue = false)] + public Int32 JobTimeoutInMinutes + { + get; + set; + } + + /// + /// The job cancel timeout (in minutes) for builds cancelled by user for this definition. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 JobCancelTimeoutInMinutes + { + get; + set; + } + + /// + /// Indicates whether badges are enabled for this definition. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean BadgeEnabled + { + get; + set; + } + + /// + /// The list of steps for this definition. + /// + [Obsolete] + [EditorBrowsable(EditorBrowsableState.Never)] + public List Steps + { + get; + } + + /// + /// The build process. + /// + [DataMember(EmitDefaultValue = false)] + public BuildProcess Process + { + get; + set; + } + + /// + /// A list of build options used by this definition. + /// + public List Options + { + get + { + if (m_options == null) + { + m_options = new List(); + } + + return m_options; + } + internal set + { + m_options = value; + } + } + + /// + /// The repository. + /// + [DataMember(EmitDefaultValue = false)] + public BuildRepository Repository + { + get; + set; + } + + /// + /// The process parameters for this definition. + /// + [DataMember(EmitDefaultValue = false)] + public ProcessParameters ProcessParameters + { + get; + set; + } + + /// + /// The list of triggers for this definition. + /// + public List Triggers + { + get + { + if (m_triggers == null) + { + m_triggers = new List(); + } + + return m_triggers; + } + internal set + { + m_triggers = value; + } + } + + /// + /// The variables used by this definition. + /// + public IDictionary Variables + { + get + { + if (m_variables == null) + { + m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_variables; + } + internal set + { + m_variables = new Dictionary(value, StringComparer.OrdinalIgnoreCase); + } + } + + /// + /// The variable groups used by this definition. + /// + public List VariableGroups + { + get + { + if (m_variableGroups == null) + { + m_variableGroups = new List(); + } + + return m_variableGroups; + } + internal set + { + m_variableGroups = value; + } + } + + /// + /// The list of demands that represents the capabilities required by all agents for this definition. + /// + public List Demands + { + get + { + if (m_demands == null) + { + m_demands = new List(); + } + + return m_demands; + } + internal set + { + m_demands = value; + } + } + + /// + /// The list of retention policies for this definition. + /// + public List RetentionRules + { + get + { + if (m_retentionRules == null) + { + m_retentionRules = new List(); + } + + return m_retentionRules; + } + internal set + { + m_retentionRules = value; + } + } + + /// + /// A collection of properties which may be used to extend the storage fields available + /// for a given definition. + /// + public PropertiesCollection Properties + { + get + { + if (m_properties == null) + { + m_properties = new PropertiesCollection(); + } + + return m_properties; + } + internal set + { + m_properties = value; + } + } + + /// + /// A collection of tags associated with the build definition. 
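+ /// The list is created on first access and has an internal setter used during deserialization.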
+ /// + public List Tags + { + get + { + if (m_tags == null) + { + m_tags = new List(); + } + + return m_tags; + } + internal set + { + m_tags = value; + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedOptions, ref m_options, true); + SerializationHelper.Copy(ref m_serializedTriggers, ref m_triggers, true); + SerializationHelper.Copy(ref m_serializedVariables, ref m_variables, StringComparer.OrdinalIgnoreCase, true); + SerializationHelper.Copy(ref m_serializedVariableGroups, ref m_variableGroups, true); + SerializationHelper.Copy(ref m_serializedDemands, ref m_demands, true); + SerializationHelper.Copy(ref m_serializedRetentionRules, ref m_retentionRules, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_options, ref m_serializedOptions); + SerializationHelper.Copy(ref m_triggers, ref m_serializedTriggers); + SerializationHelper.Copy(ref m_variables, ref m_serializedVariables, StringComparer.OrdinalIgnoreCase); + SerializationHelper.Copy(ref m_variableGroups, ref m_serializedVariableGroups); + SerializationHelper.Copy(ref m_demands, ref m_serializedDemands); + SerializationHelper.Copy(ref m_retentionRules, ref m_serializedRetentionRules); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedOptions = null; + m_serializedTriggers = null; + m_serializedVariables = null; + m_serializedVariableGroups = null; + m_serializedRetentionRules = null; + } + + [DataMember(Name = "Options", EmitDefaultValue = false)] + private List m_serializedOptions; + + [DataMember(Name = "Triggers", EmitDefaultValue = false)] + private List m_serializedTriggers; + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private IDictionary m_serializedVariables; + + [DataMember(Name = "VariableGroups", EmitDefaultValue = false)] + private List m_serializedVariableGroups; + + [DataMember(Name = "Demands", EmitDefaultValue = false)] + private List m_serializedDemands; + + [DataMember(Name = "RetentionRules", EmitDefaultValue = false)] + private List m_serializedRetentionRules; + + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "Properties")] + private PropertiesCollection m_properties; + + [DataMember(EmitDefaultValue = false, Name = "Tags")] + private List m_tags; + + private List m_demands; + private List m_options; + private List m_triggers; + private List m_retentionRules; + private List m_variableGroups; + private IDictionary m_variables; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionReference.cs new file mode 100644 index 00000000000..ca2798a5b98 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionReference.cs @@ -0,0 +1,162 @@ +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a reference to a build definition. + /// + [DataContract] + public class BuildDefinitionReference : DefinitionReference + { + public BuildDefinitionReference() + { + Type = DefinitionType.Build; + QueueStatus = DefinitionQueueStatus.Enabled; + } + + /// + /// The quality of the definition document (draft, etc.) + /// + [DataMember(EmitDefaultValue = false, Name = "Quality")] + public DefinitionQuality? 
DefinitionQuality + { + get; + set; + } + + /// + /// The author of the definition. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef AuthoredBy + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// A reference to the definition that this definition is a draft of, if this is a draft definition. + /// + [DataMember(EmitDefaultValue = false, Name = "draftOf")] + public DefinitionReference ParentDefinition + { + get; + set; + } + + /// + /// The list of drafts associated with this definition, if this is not a draft definition. + /// + [DataMember(EmitDefaultValue = false)] + public List Drafts + { + get + { + return m_drafts ?? (m_drafts = new List()); + } + internal set + { + m_drafts = value; + } + } + + /// + /// The default queue for builds run against this definition. + /// + [DataMember(EmitDefaultValue = false)] + public AgentPoolQueue Queue + { + get; + set; + } + + /// + /// The metrics for this definition. + /// + public List Metrics + { + get + { + return m_metrics ?? (m_metrics = new List()); + } + internal set + { + m_metrics = value; + } + } + + /// + /// The latest build for this definition. + /// + public Build LatestBuild + { + get + { + return m_latestBuild; + } + internal set + { + m_latestBuild = value; + } + } + + /// + /// The latest completed build for this definition. + /// + public Build LatestCompletedBuild + { + get + { + return m_latestCompletedBuild; + } + internal set + { + m_latestCompletedBuild = value; + } + } + + /// + /// The links to other objects related to this object. + /// + public ReferenceLinks Links => m_links ?? (m_links = new ReferenceLinks()); + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedMetrics, ref m_metrics, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_metrics, ref m_serializedMetrics); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedMetrics = null; + } + + [DataMember(Name = "Metrics", EmitDefaultValue = false)] + private List m_serializedMetrics; + + [DataMember(Name = "_links", EmitDefaultValue = false)] + private ReferenceLinks m_links; + + private List m_metrics; + + private List m_drafts; + + [DataMember(EmitDefaultValue = false, Name = "LatestBuild")] + private Build m_latestBuild; + + [DataMember(EmitDefaultValue = false, Name = "LatestCompletedBuild")] + private Build m_latestCompletedBuild; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionRevision.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionRevision.cs new file mode 100644 index 00000000000..b8626596163 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionRevision.cs @@ -0,0 +1,86 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a revision of a build definition. + /// + [DataContract] + public class BuildDefinitionRevision + { + /// + /// The revision number. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 Revision + { + get; + set; + } + + /// + /// The name of the definition. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// The identity of the person or process that changed the definition. 
+ /// + [DataMember(IsRequired = false, EmitDefaultValue = false, Order = 30)] + public IdentityRef ChangedBy + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The date and time that the definition was changed. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime ChangedDate + { + get; + set; + } + + /// + /// The change type (add, edit, delete). + /// + [DataMember(EmitDefaultValue = false)] + public AuditAction ChangeType + { + get; + set; + } + + /// + /// The comment associated with the change. + /// + [DataMember(EmitDefaultValue = false)] + public String Comment + { + get; + set; + } + + /// + /// A link to the definition at this revision. + /// + [DataMember(EmitDefaultValue = false)] + public String DefinitionUrl + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionStep.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionStep.cs new file mode 100644 index 00000000000..3231fde4612 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionStep.cs @@ -0,0 +1,250 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// A reference to a task definition. + /// + [DataContract] + public class TaskDefinitionReference : BaseSecuredObject + { + public TaskDefinitionReference() + { + } + + public TaskDefinitionReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The ID of the task. + /// + [DataMember(IsRequired = true)] + public Guid Id + { + get; + set; + } + + /// + /// The version of the task. + /// + [DataMember(IsRequired = true)] + public String VersionSpec + { + get; + set; + } + + /// + /// The type of task (task or task group). + /// + [DataMember(IsRequired = false)] + public String DefinitionType + { + get; + set; + } + + /// + /// A clone of this reference. + /// + /// + public TaskDefinitionReference Clone() + { + return (TaskDefinitionReference)this.MemberwiseClone(); + } + } + + /// + /// Represents a step in a build phase. + /// + [DataContract] + public class BuildDefinitionStep : BaseSecuredObject + { + public BuildDefinitionStep() + { + } + + internal BuildDefinitionStep( + ISecuredObject securedObject) + : base(securedObject) + { + } + + private BuildDefinitionStep(BuildDefinitionStep toClone) + { + ArgumentUtility.CheckForNull(toClone, nameof(toClone)); + + this.Enabled = toClone.Enabled; + this.ContinueOnError = toClone.ContinueOnError; + this.AlwaysRun = toClone.AlwaysRun; + this.DisplayName = toClone.DisplayName; + this.TimeoutInMinutes = toClone.TimeoutInMinutes; + this.Condition = toClone.Condition; + this.RefName = toClone.RefName; + + // Cloning the reference type variables since memberwiseclone does a shallow copy + if (toClone.TaskDefinition != null) + { + this.TaskDefinition = toClone.TaskDefinition.Clone(); + } + + if (toClone.m_inputs != null) + { + foreach (var property in toClone.m_inputs) + { + this.Inputs.Add(property.Key, property.Value); + } + } + + if (toClone.m_environment != null) + { + foreach (var property in toClone.m_environment) + { + this.Environment.Add(property.Key, property.Value); + } + } + } + + /// + /// The task associated with this step. + /// + [DataMember(IsRequired = true, Order = 1, Name = "Task")] + public TaskDefinitionReference TaskDefinition + { + get; + set; + } + + /// + /// The inputs used by this step. 
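+ /// Input names are matched case-insensitively.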
+ /// + public IDictionary Inputs + { + get + { + if (m_inputs == null) + { + m_inputs = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_inputs; + } + set + { + m_inputs = new Dictionary(value, StringComparer.OrdinalIgnoreCase); + } + } + + /// + /// Indicates whether the step is enabled. + /// + [DataMember(EmitDefaultValue = true)] + public Boolean Enabled + { + get; + set; + } + + /// + /// Indicates whether the phase should continue even if this step fails. + /// + [DataMember(EmitDefaultValue = true)] + public Boolean ContinueOnError + { + get; + set; + } + + /// + /// Indicates whether this step should run even if a previous step fails. + /// + [DataMember(EmitDefaultValue = true)] + public Boolean AlwaysRun + { + get; + set; + } + + /// + /// The display name for this step. + /// + [DataMember(EmitDefaultValue = false)] + public String DisplayName + { + get; + set; + } + + /// + /// The time, in minutes, that this step is allowed to run. + /// + [DataMember(EmitDefaultValue = true)] + public Int32 TimeoutInMinutes + { + get; + set; + } + + /// + /// A condition that determines whether this step should run. + /// + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public String Condition + { + get; + set; + } + + /// + /// The reference name for this step. + /// + [DataMember(EmitDefaultValue = false)] + public String RefName + { + get; + set; + } + + /// + /// The run-time environment for this step. + /// + public IDictionary Environment + { + get + { + if (m_environment == null) + { + m_environment = new Dictionary(StringComparer.Ordinal); + } + + return m_environment; + } + set + { + m_environment = new Dictionary(value, StringComparer.Ordinal); + } + } + + /// + /// A clone of this step. + /// + /// + public BuildDefinitionStep Clone() + { + return new BuildDefinitionStep(this); + } + + [DataMember(Name = "Environment", EmitDefaultValue = false)] + private Dictionary m_environment; + + [DataMember(Name = "Inputs", EmitDefaultValue = false, Order = 2)] + private Dictionary m_inputs; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionTemplate.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionTemplate.cs new file mode 100644 index 00000000000..7137f663d51 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionTemplate.cs @@ -0,0 +1,122 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a template from which new build definitions can be created. + /// + [DataContract] + public class BuildDefinitionTemplate + { + + public BuildDefinitionTemplate() + { + Category = "Custom"; + } + + /// + /// The ID of the template. + /// + [DataMember(IsRequired = true)] + public String Id + { + get; + set; + } + + /// + /// The name of the template. + /// + [DataMember(IsRequired = true)] + public String Name + { + get; + set; + } + + /// + /// Indicates whether the template can be deleted. + /// + [DataMember(EmitDefaultValue = true)] + public Boolean CanDelete + { + get; + set; + } + + /// + /// The template category. + /// + [DataMember(EmitDefaultValue = true)] + public String Category + { + get; + set; + } + + /// + /// An optional hosted agent queue for the template to use by default. + /// + [DataMember(EmitDefaultValue = true)] + public String DefaultHostedQueue + { + get; + set; + } + + /// + /// The ID of the task whose icon is used when showing this template in the UI. 
+ /// + [DataMember(EmitDefaultValue = false)] + public Guid IconTaskId + { + get; + set; + } + + /// + /// A description of the template. + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + /// + /// The actual template. + /// + [DataMember(EmitDefaultValue = false)] + public BuildDefinition Template + { + get; + set; + } + + /// + /// A dictionary of media type strings to icons for this template. + /// + public IDictionary Icons + { + get + { + if (m_icons == null) + { + m_icons = new Dictionary(StringComparer.Ordinal); + } + + return m_icons; + } + internal set + { + m_icons = new Dictionary(value, StringComparer.Ordinal); + } + } + + [DataMember(EmitDefaultValue = false, Name = "Icons")] + private Dictionary m_icons; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionVariable.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionVariable.cs new file mode 100644 index 00000000000..51f35941e30 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildDefinitionVariable.cs @@ -0,0 +1,69 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a variable used by a build definition. + /// + [DataContract] + public class BuildDefinitionVariable : BaseSecuredObject + { + public BuildDefinitionVariable() + { + } + + internal BuildDefinitionVariable( + ISecuredObject securedObject) + : base(securedObject) + { + } + + private BuildDefinitionVariable(BuildDefinitionVariable variableToClone) + { + Value = variableToClone.Value; + AllowOverride = variableToClone.AllowOverride; + IsSecret = variableToClone.IsSecret; + } + + /// + /// The value of the variable. + /// + [DataMember(EmitDefaultValue = true)] + public String Value + { + get; + set; + } + + /// + /// Indicates whether the value can be set at queue time. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean AllowOverride + { + get; + set; + } + + /// + /// Indicates whether the variable's value is a secret. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean IsSecret + { + get; + set; + } + + /// + /// A clone of this BuildDefinitionVariable. + /// + /// A new BuildDefinitionVariable + public BuildDefinitionVariable Clone() + { + return new BuildDefinitionVariable(this); + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildLog.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildLog.cs new file mode 100644 index 00000000000..e8acba1cbf1 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildLog.cs @@ -0,0 +1,53 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a build log. + /// + [DataContract] + public class BuildLog : BuildLogReference + { + public BuildLog() + { + } + + public BuildLog( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The number of lines in the log. + /// + [DataMember(EmitDefaultValue = false)] + public Int64 LineCount + { + get; + set; + } + + /// + /// The date and time the log was created. + /// + [DataMember] + public DateTime? CreatedOn + { + get; + set; + } + + /// + /// The date and time the log was last changed. + /// + [DataMember] + public DateTime? 
LastChangedOn + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildLogReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildLogReference.cs new file mode 100644 index 00000000000..a169ce688ed --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildLogReference.cs @@ -0,0 +1,54 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a reference to a build log. + /// + [DataContract] + public class BuildLogReference : BaseSecuredObject + { + public BuildLogReference() + { + } + + internal BuildLogReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The ID of the log. + /// + // EmitDefaultValue is true to ensure that id = 0 is sent for XAML builds' "ActivityLog.xml" + [DataMember(IsRequired = false, EmitDefaultValue = true)] + public Int32 Id + { + get; + set; + } + + /// + /// The type of the log location. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Type + { + get; + set; + } + + /// + /// A full link to the log resource. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Url + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildMetric.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildMetric.cs new file mode 100644 index 00000000000..031c5554f45 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildMetric.cs @@ -0,0 +1,63 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents metadata about builds in the system. + /// + [DataContract] + public class BuildMetric : BaseSecuredObject + { + public BuildMetric() + { + } + + internal BuildMetric( + ISecuredObject securedObject) + :base(securedObject) + { + } + + /// + /// The name of the metric. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// The scope. + /// + [DataMember(EmitDefaultValue = false)] + public String Scope + { + get; + set; + } + + /// + /// The value. + /// + [DataMember(EmitDefaultValue = true)] + public Int32 IntValue + { + get; + set; + } + + /// + /// The date for the scope. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? Date + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildOption.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildOption.cs new file mode 100644 index 00000000000..46ea79f3686 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildOption.cs @@ -0,0 +1,67 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents the application of an optional behavior to a build definition. + /// + [DataContract] + public class BuildOption : BaseSecuredObject + { + public BuildOption() + { + } + + internal BuildOption( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// A reference to the build option. + /// + [DataMember(IsRequired = true, Order = 1, Name = "Definition")] + public virtual BuildOptionDefinitionReference BuildOptionDefinition + { + get; + set; + } + + /// + /// The inputs that configure the behavior. 
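+ /// Keys are compared using a case-insensitive ordinal comparer.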
+ /// + public virtual IDictionary Inputs + { + get + { + if (m_inputs == null) + { + m_inputs = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_inputs; + } + internal set + { + m_inputs = new Dictionary(value, StringComparer.OrdinalIgnoreCase); + } + } + + /// + /// Indicates whether the behavior is enabled. + /// + [DataMember(EmitDefaultValue = true)] + public virtual Boolean Enabled + { + get; + set; + } + + [DataMember(Name = "Inputs", EmitDefaultValue = false, Order = 2)] + private Dictionary m_inputs; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildOptionDefinition.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildOptionDefinition.cs new file mode 100644 index 00000000000..7aed376f944 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildOptionDefinition.cs @@ -0,0 +1,100 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents an optional behavior that can be applied to a build definition. + /// + [DataContract] + public class BuildOptionDefinition : BuildOptionDefinitionReference + { + public BuildOptionDefinition() + { + } + + internal BuildOptionDefinition( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// A value that indicates the relative order in which the behavior should be applied. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 Ordinal + { + get; + set; + } + + /// + /// The name of the build option. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// The description. + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + /// + /// The list of inputs defined for the build option. + /// + [DataMember(EmitDefaultValue = false)] + public IList Inputs + { + get + { + if (m_inputs == null) + { + m_inputs = new List(); + } + + return m_inputs; + } + set + { + m_inputs = new List(value); + } + } + + /// + /// The list of input groups defined for the build option. + /// + [DataMember(EmitDefaultValue = false)] + public IList Groups + { + get + { + if (m_groups == null) + { + m_groups = new List(); + } + + return m_groups; + } + set + { + m_groups = new List(value); + } + } + + private List m_inputs; + + private List m_groups; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildOptionDefinitionReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildOptionDefinitionReference.cs new file mode 100644 index 00000000000..c443bf619c6 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildOptionDefinitionReference.cs @@ -0,0 +1,33 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a reference to a build option definition. + /// + [DataContract] + public class BuildOptionDefinitionReference : BaseSecuredObject + { + public BuildOptionDefinitionReference() + { + } + + internal BuildOptionDefinitionReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The ID of the referenced build option. 
+ /// + [DataMember(IsRequired = true, Order = 1)] + public Guid Id + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildOptionGroupDefinition.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildOptionGroupDefinition.cs new file mode 100644 index 00000000000..c0a02b914a2 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildOptionGroupDefinition.cs @@ -0,0 +1,53 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a group of inputs for a build option. + /// + [DataContract] + public class BuildOptionGroupDefinition : BaseSecuredObject + { + public BuildOptionGroupDefinition() + { + } + + internal BuildOptionGroupDefinition( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The internal name of the group. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// The name of the group to display in the UI. + /// + [DataMember(EmitDefaultValue = false)] + public String DisplayName + { + get; + set; + } + + /// + /// Indicates whether the group is initially displayed as expanded in the UI. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean IsExpanded + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildOptionInputDefinition.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildOptionInputDefinition.cs new file mode 100644 index 00000000000..3e6f40ef897 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildOptionInputDefinition.cs @@ -0,0 +1,144 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents an input for a build option. + /// + [DataContract] + public class BuildOptionInputDefinition : BaseSecuredObject + { + public BuildOptionInputDefinition() + : this(null) + { + } + + internal BuildOptionInputDefinition( + ISecuredObject securedObject) + : base(securedObject) + { + InputType = BuildOptionInputType.String; + DefaultValue = String.Empty; + Required = false; + } + + /// + /// The name of the input. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// The label for the input. + /// + [DataMember(EmitDefaultValue = false)] + public String Label + { + get; + set; + } + + /// + /// The default value. + /// + [DataMember(EmitDefaultValue = false)] + public String DefaultValue + { + get; + set; + } + + /// + /// Indicates whether the input is required to have a value. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean Required + { + get; + set; + } + + /// + /// Indicates the type of the input value. + /// + [DataMember(Name = "Type")] + public BuildOptionInputType InputType + { + get; + set; + } + + /// + /// The rule that is applied to determine whether the input is visible in the UI. + /// + [DataMember(EmitDefaultValue = false)] + public String VisibleRule + { + // Typical format is "NAMEOFTHEDEPENDENTINPUT = VALUETOBEBOUND" + get; + set; + } + + /// + /// The name of the input group that this input belongs to. + /// + [DataMember(EmitDefaultValue = false)] + public String GroupName + { + get; + set; + } + + /// + /// A dictionary of options for this input. 
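+ /// Created on first access; serialized under the "Options" data member name.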
+ /// + public Dictionary Options + { + get + { + if (m_Options == null) + { + m_Options = new Dictionary(); + } + return m_Options; + } + set + { + m_Options = value; + } + } + + /// + /// A dictionary of help documents for this input. + /// + public Dictionary HelpDocuments + { + get + { + if (m_HelpDocuments == null) + { + m_HelpDocuments = new Dictionary(); + } + + return m_HelpDocuments; + } + set + { + m_HelpDocuments = new Dictionary(value); + } + } + + [DataMember(Name = "Options", EmitDefaultValue = false)] + private Dictionary m_Options; + + [DataMember(Name = "Help", EmitDefaultValue = false)] + private Dictionary m_HelpDocuments; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildProcess.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildProcess.cs new file mode 100644 index 00000000000..f9514e43e98 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildProcess.cs @@ -0,0 +1,45 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a build process. + /// + [DataContract] + [KnownType(typeof(DesignerProcess))] + [KnownType(typeof(YamlProcess))] + [KnownType(typeof(DockerProcess))] + [KnownType(typeof(JustInTimeProcess))] + [JsonConverter(typeof(BuildProcessJsonConverter))] + public class BuildProcess : BaseSecuredObject + { + protected BuildProcess( + Int32 type) + { + } + + protected internal BuildProcess( + Int32 type, + ISecuredObject securedObject) + : base(securedObject) + { + this.Type = type; + } + + /// + /// The type of the process. + /// + /// + /// See for a list of valid process types. + /// + [DataMember(Name = "Type")] + public Int32 Type + { + get; + internal set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildProcessJsonConverter.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildProcessJsonConverter.cs new file mode 100644 index 00000000000..5f951beae07 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildProcessJsonConverter.cs @@ -0,0 +1,68 @@ +using System; +using Newtonsoft.Json.Linq; + +namespace GitHub.Build.WebApi +{ + internal sealed class BuildProcessJsonConverter : TypePropertyJsonConverter + { + protected override BuildProcess GetInstance( + Type objectType) + { + if (objectType == typeof(DesignerProcess)) + { + return new DesignerProcess(); + } + else if (objectType == typeof(YamlProcess)) + { + return new YamlProcess(); + } + else if (objectType == typeof(DockerProcess)) + { + return new DockerProcess(); + } + else if (objectType == typeof(JustInTimeProcess)) + { + return new JustInTimeProcess(); + } + else + { + return base.GetInstance(objectType); + } + } + + protected override BuildProcess GetInstance( + Int32 targetType) + { + switch (targetType) + { + case ProcessType.Yaml: + return new YamlProcess(); + case ProcessType.Docker: + return new DockerProcess(); + case ProcessType.JustInTime: + return new JustInTimeProcess(); + case ProcessType.Designer: + default: + return new DesignerProcess(); + } + } + + protected override Boolean TryInferType( + JObject value, + out Int32 type) + { + // if it has a YamlFilename property, assume it's a YamlProcess + if (value.TryGetValue("yamlFilename", StringComparison.OrdinalIgnoreCase, out JToken yamlFilename)) + { + type = ProcessType.Yaml; + return true; + } + else + { + // default to Designer process + type = ProcessType.Designer; + return true; + } + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildProcessResources.cs 
b/src/Sdk/BuildWebApi/Api/Contracts/BuildProcessResources.cs new file mode 100644 index 00000000000..6d0a910d661 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildProcessResources.cs @@ -0,0 +1,247 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents resources used by a build process. + /// + [DataContract] + public sealed class BuildProcessResources : BaseSecuredObject + { + public BuildProcessResources() + { + } + + internal BuildProcessResources( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// Information about the queues used by the process. + /// + public IList Queues + { + get + { + if (m_queues == null) + { + m_queues = new List(); + } + return m_queues; + } + set + { + m_queues = new List(value); + } + } + + /// + /// Information about the endpoints used by the process. + /// + public IList Endpoints + { + get + { + if (m_endpoints == null) + { + m_endpoints = new List(); + } + return m_endpoints; + } + set + { + m_endpoints = new List(value); + } + } + + /// + /// Information about the secure files used by the process. + /// + public IList Files + { + get + { + if (m_files == null) + { + m_files = new List(); + } + return m_files; + } + set + { + m_files = new List(value); + } + } + + /// + /// Information about the variable groups used by the process. + /// + public IList VariableGroups + { + get + { + if (m_variableGroups == null) + { + m_variableGroups = new List(); + } + return m_variableGroups; + } + set + { + m_variableGroups = new List(value); + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_queues?.Count == 0) + { + m_queues = null; + } + + if (m_endpoints?.Count == 0) + { + m_endpoints = null; + } + + if (m_files?.Count == 0) + { + m_files = null; + } + + if (m_variableGroups?.Count == 0) + { + m_variableGroups = null; + } + } + + [DataMember(Name = "Queues", EmitDefaultValue = false)] + private List m_queues; + + [DataMember(Name = "Endpoints", EmitDefaultValue = false)] + private List m_endpoints; + + [DataMember(Name = "Files", EmitDefaultValue = false)] + private List m_files; + + [DataMember(Name = "VariableGroups", EmitDefaultValue = false)] + private List m_variableGroups; + } + + /// + /// Represents a reference to a resource. + /// + [DataContract] + public abstract class ResourceReference : BaseSecuredObject + { + public ResourceReference() + { + } + + protected ResourceReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// An alias to be used when referencing the resource. + /// + [DataMember(EmitDefaultValue = false)] + public String Alias + { + get; + set; + } + } + + /// + /// Represents a reference to an agent queue. + /// + [DataContract] + public class AgentPoolQueueReference : ResourceReference + { + public AgentPoolQueueReference() + : this(null) + { + } + + internal AgentPoolQueueReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The ID of the queue. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + set; + } + } + + /// + /// Represents a referenec to a service endpoint. 
+ /// + [DataContract] + public class ServiceEndpointReference : ResourceReference + { + public ServiceEndpointReference() + : this(null) + { + } + + internal ServiceEndpointReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The ID of the service endpoint. + /// + [DataMember(EmitDefaultValue = false)] + public Guid Id + { + get; + set; + } + } + + /// + /// Represents a reference to a secure file. + /// + [DataContract] + public class SecureFileReference : ResourceReference + { + public SecureFileReference() + : this(null) + { + } + + internal SecureFileReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The ID of the secure file. + /// + [DataMember(EmitDefaultValue = false)] + public Guid Id + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildReference.cs new file mode 100644 index 00000000000..18833250b90 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildReference.cs @@ -0,0 +1,138 @@ +using GitHub.Services.WebApi; +using System; +using System.ComponentModel; +using System.ComponentModel.DataAnnotations; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a reference to a build. + /// + [DataContract] + public class BuildReference : BaseSecuredObject + { + public BuildReference() + { + } + + internal BuildReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The ID of the build. + /// + [DataMember(EmitDefaultValue = false)] + [Key] + public Int32 Id + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The build number. + /// + [DataMember(EmitDefaultValue = false)] + public String BuildNumber + { + get; + set; + } + + /// + /// The build status. + /// + [DataMember(EmitDefaultValue = false)] + public BuildStatus? Status + { + get; + set; + } + + /// + /// The build result. + /// + [DataMember(EmitDefaultValue = false)] + public BuildResult? Result + { + get; + set; + } + + /// + /// The time that the build was queued. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? QueueTime + { + get; + set; + } + + /// + /// The time that the build was started. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? StartTime + { + get; + set; + } + + /// + /// The time that the build was completed. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? FinishTime + { + get; + set; + } + + /// + /// The identity on whose behalf the build was queued. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef RequestedFor + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// Indicates whether the build has been deleted. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean Deleted + { + get; + set; + } + + /// + /// The links to other objects related to this object. 
+ /// + public ReferenceLinks Links + { + get + { + if (m_links == null) + { + m_links = new ReferenceLinks(); + } + return m_links; + } + } + + [DataMember(Name = "_links", EmitDefaultValue = false)] + private ReferenceLinks m_links; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildReportMetadata.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildReportMetadata.cs new file mode 100644 index 00000000000..e5467f9be77 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildReportMetadata.cs @@ -0,0 +1,55 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents information about a build report. + /// + [DataContract] + public class BuildReportMetadata + { + public BuildReportMetadata() + { + } + + public BuildReportMetadata(Int32 buildId, String type) + { + this.BuildId = buildId; + this.Type = type; + } + + /// + /// The Id of the build. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 BuildId + { + get; + set; + } + + /// + /// The content of the report. + /// + [DataMember(EmitDefaultValue = false)] + public String Content + { + get; + set; + } + + /// + /// The type of the report. + /// + /// + /// See for a list of supported report types. + /// + [DataMember(EmitDefaultValue = false)] + public String Type + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildRepository.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildRepository.cs new file mode 100644 index 00000000000..be2f2af5ce6 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildRepository.cs @@ -0,0 +1,185 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a repository used by a build definition. + /// + [DataContract] + public class BuildRepository : BaseSecuredObject + { + public BuildRepository() + { + } + + internal BuildRepository( + ISecuredObject securedObject) + : base(securedObject) + { + } + + private BuildRepository( + BuildRepository toClone) + : base(toClone) + { + this.Id = toClone.Id; + this.Type = toClone.Type; + this.Name = toClone.Name; + this.Url = toClone.Url; + this.DefaultBranch = toClone.DefaultBranch; + this.RootFolder = toClone.RootFolder; + this.Clean = toClone.Clean; + this.CheckoutSubmodules = toClone.CheckoutSubmodules; + + if (toClone.m_properties != null) + { + foreach (var property in toClone.m_properties) + { + this.Properties.Add(property.Key, property.Value); + } + } + } + + /// + /// The ID of the repository. + /// + [DataMember(EmitDefaultValue = false)] + public String Id + { + get; + set; + } + + /// + /// The type of the repository. + /// + [DataMember] + public String Type + { + get; + set; + } + + /// + /// The friendly name of the repository. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// The URL of the repository. + /// + [DataMember(EmitDefaultValue = false)] + public Uri Url + { + get; + set; + } + + /// + /// The name of the default branch. + /// + [DataMember(EmitDefaultValue = false)] + public String DefaultBranch + { + get; + set; + } + + /// + /// The root folder. + /// + [DataMember(EmitDefaultValue = false)] + public String RootFolder + { + get; + set; + } + + /// + /// Indicates whether to clean the target folder when getting code from the repository. + /// + /// + /// This is a String so that it can reference variables. 
+ /// + [DataMember(EmitDefaultValue = true)] + public String Clean + { + get; + set; + } + + /// + /// Indicates whether to checkout submodules. + /// + [DataMember(EmitDefaultValue = true)] + public Boolean CheckoutSubmodules + { + get; + set; + } + + /// + /// A dictionary that holds additional information about the repository. + /// + public IDictionary Properties + { + // Warning: This can contain secrets too. As part of #952656, we resolve secrets, it was done considering the fact that this is not a "DataMember" + // If it's ever made a "DataMember" please be cautious, we would be leaking secrets + get + { + if (m_properties == null) + { + m_properties = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_properties; + } + internal set + { + m_properties = new Dictionary(value, StringComparer.OrdinalIgnoreCase); + } + } + + /// + /// Clones this object. + /// + /// + public BuildRepository Clone() + { + return new BuildRepository(this); + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedProperties, ref m_properties, StringComparer.OrdinalIgnoreCase, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_properties, ref m_serializedProperties, StringComparer.OrdinalIgnoreCase); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedProperties = null; + } + + [DataMember(Name = "Properties", EmitDefaultValue = false)] + private IDictionary m_serializedProperties; + + // Warning: This can contain secrets too. As part of #952656, we resolve secrets, it was done considering the fact that this is not a "DataMember" + // If it's ever made a "DataMember" please be cautious, we would be leaking secrets + private IDictionary m_properties; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildRequestValidationResult.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildRequestValidationResult.cs new file mode 100644 index 00000000000..6216f8cea4e --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildRequestValidationResult.cs @@ -0,0 +1,43 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents the result of validating a build request. + /// + [DataContract] + public class BuildRequestValidationResult : BaseSecuredObject + { + public BuildRequestValidationResult() + { + } + + public BuildRequestValidationResult( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The result. + /// + [DataMember] + public ValidationResult Result + { + get; + set; + } + + /// + /// The message associated with the result. + /// + [DataMember] + public String Message + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildResourceUsage.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildResourceUsage.cs new file mode 100644 index 00000000000..a66d6ce15d5 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildResourceUsage.cs @@ -0,0 +1,64 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents information about resources used by builds in the system. 
+ /// + [DataContract] + public sealed class BuildResourceUsage + { + internal BuildResourceUsage() + { + } + + internal BuildResourceUsage(Int32 xaml, Int32 dtAgents, Int32 paidAgentSlots, Boolean isThrottlingEnabled = false) + { + this.XamlControllers = xaml; + this.DistributedTaskAgents = dtAgents; + this.TotalUsage = this.XamlControllers + (isThrottlingEnabled ? 0 : this.DistributedTaskAgents); + this.PaidPrivateAgentSlots = paidAgentSlots; + } + + /// + /// The number of XAML controllers. + /// + [DataMember] + public Int32 XamlControllers + { + get; + internal set; + } + + /// + /// The number of build agents. + /// + [DataMember] + public Int32 DistributedTaskAgents + { + get; + internal set; + } + + /// + /// The total usage. + /// + [DataMember] + public Int32 TotalUsage + { + get; + internal set; + } + + /// + /// The number of paid private agent slots. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 PaidPrivateAgentSlots + { + get; + internal set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildSettings.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildSettings.cs new file mode 100644 index 00000000000..9a2612747ce --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildSettings.cs @@ -0,0 +1,54 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents system-wide build settings. + /// + [DataContract] + public class BuildSettings : BaseSecuredObject + { + public BuildSettings() + : this(null) + { + } + + public BuildSettings( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The default retention policy. + /// + [DataMember] + public RetentionPolicy DefaultRetentionPolicy + { + get; + set; + } + + /// + /// The maximum retention policy. + /// + [DataMember] + public RetentionPolicy MaximumRetentionPolicy + { + get; + set; + } + + /// + /// The number of days to keep records of deleted builds. + /// + [DataMember] + public Int32 DaysToKeepDeletedBuildsBeforeDestroy + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildTrigger.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildTrigger.cs new file mode 100644 index 00000000000..c7e2f9c9952 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildTrigger.cs @@ -0,0 +1,446 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a trigger for a build definition. + /// + [DataContract] + [KnownType(typeof(ContinuousIntegrationTrigger))] + [KnownType(typeof(GatedCheckInTrigger))] + [KnownType(typeof(ScheduleTrigger))] + [KnownType(typeof(PullRequestTrigger))] + [JsonConverter(typeof(BuildTriggerJsonConverter))] + public abstract class BuildTrigger : BaseSecuredObject + { + protected BuildTrigger(DefinitionTriggerType triggerType) + : this(triggerType, null) + { + } + + protected internal BuildTrigger( + DefinitionTriggerType triggerType, + ISecuredObject securedObject) + : base(securedObject) + { + this.TriggerType = triggerType; + } + + /// + /// The type of the trigger. + /// + [DataMember] + public DefinitionTriggerType TriggerType + { + get; + private set; + } + } + + /// + /// Represents a continuous integration (CI) trigger.
+ /// + [DataContract] + public sealed class ContinuousIntegrationTrigger : BuildTrigger + { + public ContinuousIntegrationTrigger() + : this(null) + { + } + + internal ContinuousIntegrationTrigger( + ISecuredObject securedObject) + : base(DefinitionTriggerType.ContinuousIntegration, securedObject) + { + MaxConcurrentBuildsPerBranch = 1; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public Int32 SettingsSourceType + { + get + { + if (m_settingsSourceType == 0) + { + m_settingsSourceType = WebApi.SettingsSourceType.Definition; + } + + return m_settingsSourceType; + } + set + { + m_settingsSourceType = value; + } + } + + /// + /// Indicates whether changes should be batched while another CI build is running. + /// + /// + /// If this is true, then changes submitted while a CI build is running will be batched and built in one new CI build when the current build finishes. + /// If this is false, then a new CI build will be triggered for each change to the repository. + /// + [DataMember] + public Boolean BatchChanges + { + get; + set; + } + + /// + /// The maximum number of simultaneous CI builds that will run per branch. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int32 MaxConcurrentBuildsPerBranch + { + get; + set; + } + + /// + /// A list of filters that describe which branches will trigger builds. + /// + public List BranchFilters + { + get + { + if (m_branchFilters == null) + { + m_branchFilters = new List(); + } + + return m_branchFilters; + } + internal set + { + m_branchFilters = value; + } + } + + // added in 3.0 + /// + /// A list of filters that describe which paths will trigger builds. + /// + public List PathFilters + { + get + { + if (m_pathFilters == null) + { + m_pathFilters = new List(); + } + + return m_pathFilters; + } + internal set + { + m_pathFilters = value; + } + } + + /// + /// The polling interval, in seconds. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int32? PollingInterval + { + get; + set; + } + + /// + /// The ID of the job used to poll an external repository. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid PollingJobId + { + // This is the ID of the polling job that polls the external repository. + // Once the build definition is saved/updated, this value is set. + get; + set; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_settingsSourceType == WebApi.SettingsSourceType.Definition) + { + m_settingsSourceType = 0; + } + } + + [DataMember(Name = "BranchFilters", EmitDefaultValue = false)] + private List m_branchFilters; + + [DataMember(Name = "PathFilters", EmitDefaultValue = false)] + private List m_pathFilters; + + [DataMember(Name = "SettingsSourceType", EmitDefaultValue = false)] + private Int32 m_settingsSourceType; + } + + /// + /// Represents a gated check-in trigger. + /// + [DataContract] + public sealed class GatedCheckInTrigger : BuildTrigger + { + public GatedCheckInTrigger() + : this(null) + { + } + + internal GatedCheckInTrigger( + ISecuredObject securedObject) + : base(DefinitionTriggerType.GatedCheckIn, securedObject) + { + } + + /// + /// Indicates whether CI triggers should run after the gated check-in succeeds. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean RunContinuousIntegration + { + get; + set; + } + + /// + /// Indicates whether to take workspace mappings into account when determining whether a build should run. 
+ /// + [DataMember(EmitDefaultValue = false)] + public Boolean UseWorkspaceMappings + { + get; + set; + } + + /// + /// A list of filters that describe which paths will trigger builds. + /// + public List PathFilters + { + get + { + if (m_pathFilters == null) + { + m_pathFilters = new List(); + } + + return m_pathFilters; + } + internal set + { + m_pathFilters = value; + } + } + + [DataMember(Name = "PathFilters", EmitDefaultValue = false)] + private List m_pathFilters; + } + + /// + /// Represents a schedule trigger. + /// + [DataContract] + public sealed class ScheduleTrigger : BuildTrigger + { + public ScheduleTrigger() + : this(null) + { + } + + internal ScheduleTrigger( + ISecuredObject securedObject) + : base(DefinitionTriggerType.Schedule, securedObject) + { + } + + /// + /// A list of schedule entries that describe when builds should run. + /// + public List Schedules + { + get + { + if (m_schedules == null) + { + m_schedules = new List(); + } + + return m_schedules; + } + set + { + m_schedules = value; + } + } + + [DataMember(Name = "Schedules", EmitDefaultValue = false)] + private List m_schedules; + } + + /// + /// Represents a pull request trigger. + /// + [DataContract] + public class PullRequestTrigger : BuildTrigger + { + public PullRequestTrigger() + : this(null) + { + } + + internal PullRequestTrigger( + ISecuredObject securedObject) + : base(DefinitionTriggerType.PullRequest, securedObject) + { + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public Int32 SettingsSourceType + { + get + { + if (m_settingsSourceType == 0) + { + m_settingsSourceType = WebApi.SettingsSourceType.Definition; + } + + return m_settingsSourceType; + } + set + { + m_settingsSourceType = value; + } + } + + + /// + /// Describes if forks of a selected repository should build and use secrets. + /// + public Forks Forks + { + get + { + if (m_forks == null) + { + m_forks = new Forks(); + } + return m_forks; + } + set + { + m_forks = value; + } + } + + /// + /// A list of filters that describe which branches will trigger builds. + /// + public List BranchFilters + { + get + { + if (m_branchFilters == null) + { + m_branchFilters = new List(); + } + + return m_branchFilters; + } + set + { + m_branchFilters = value; + } + } + + /// + /// A list of filters that describe which paths will trigger builds. + /// + public List PathFilters + { + get + { + if (m_pathFilters == null) + { + m_pathFilters = new List(); + } + + return m_pathFilters; + } + set + { + m_pathFilters = value; + } + } + + /// + /// Indicates if an update to a PR should delete current in-progress builds. + /// + [DataMember(Name = "AutoCancel", EmitDefaultValue = false)] + public Boolean? 
AutoCancel { get; set; } + + [DataMember(Name = "RequireCommentsForNonTeamMembersOnly")] + public Boolean RequireCommentsForNonTeamMembersOnly { get; set; } + + [DataMember(Name = "IsCommentRequiredForPullRequest")] + public Boolean IsCommentRequiredForPullRequest { get; set; } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_settingsSourceType == WebApi.SettingsSourceType.Definition) + { + m_settingsSourceType = 0; + } + } + + [DataMember(Name = "SettingsSourceType", EmitDefaultValue = false)] + private Int32 m_settingsSourceType; + + [DataMember(Name = "BranchFilters", EmitDefaultValue = false)] + private List m_branchFilters; + + [DataMember(Name = "Forks", EmitDefaultValue = false)] + private Forks m_forks; + + [DataMember(Name = "PathFilters", EmitDefaultValue = false)] + private List m_pathFilters; + } + + /// + /// Represents a build completion trigger. + /// + [DataContract] + public class BuildCompletionTrigger : BuildTrigger + { + public BuildCompletionTrigger() + : this(null) + { + } + public BuildCompletionTrigger( + ISecuredObject securedObject) + : base(DefinitionTriggerType.BuildCompletion, securedObject) + { + } + + /// + /// A reference to the definition that should trigger builds for this definition. + /// + [DataMember] + public DefinitionReference Definition { get; set; } + + [DataMember] + public Boolean RequiresSuccessfulBuild { get; set; } + + [DataMember] + public List BranchFilters { get; set; } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildTriggerJsonConverter.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildTriggerJsonConverter.cs new file mode 100644 index 00000000000..52b754ede62 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildTriggerJsonConverter.cs @@ -0,0 +1,115 @@ +using System; +using System.Reflection; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json.Serialization; + +namespace GitHub.Build.WebApi +{ + internal sealed class BuildTriggerJsonConverter : VssSecureJsonConverter + { + public override Boolean CanConvert(Type objectType) + { + return typeof(BuildTrigger).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Boolean CanRead + { + get + { + return true; + } + } + + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + var contract = serializer.ContractResolver.ResolveContract(objectType) as JsonObjectContract; + if (contract == null) + { + return existingValue; + } + + JsonProperty property = contract.Properties.GetClosestMatchProperty("TriggerType"); + if (property == null) + { + return existingValue; + } + + JToken itemTypeValue; + DefinitionTriggerType triggerType; + JObject value = JObject.Load(reader); + if (!value.TryGetValue(property.PropertyName, StringComparison.OrdinalIgnoreCase, out itemTypeValue)) + { + return existingValue; + } + else + { + if (itemTypeValue.Type == JTokenType.Integer) + { + triggerType = (DefinitionTriggerType)(Int32)itemTypeValue; + } + else if (itemTypeValue.Type != JTokenType.String || + !Enum.TryParse((String)itemTypeValue, true, out triggerType)) + { + return existingValue; + } + } + + Object returnValue = null; + switch (triggerType) + { + case DefinitionTriggerType.ContinuousIntegration: + returnValue = new ContinuousIntegrationTrigger(); + break; 
+ case DefinitionTriggerType.GatedCheckIn: + returnValue = new GatedCheckInTrigger(); + break; + case DefinitionTriggerType.Schedule: + returnValue = new ScheduleTrigger(); + break; + case DefinitionTriggerType.PullRequest: + returnValue = new PullRequestTrigger(); + break; + case DefinitionTriggerType.BuildCompletion: + returnValue = new BuildCompletionTrigger(); + break; + } + + if (value != null && returnValue != null) + { + using (JsonReader objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, returnValue); + } + } + + return returnValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/BuildWorkspace.cs b/src/Sdk/BuildWebApi/Api/Contracts/BuildWorkspace.cs new file mode 100644 index 00000000000..00e11c4cd2b --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/BuildWorkspace.cs @@ -0,0 +1,68 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents an entry in a workspace mapping. + /// + [DataContract] + public class MappingDetails + { + /// + /// The server path. + /// + [DataMember(Name = "serverPath")] + public String ServerPath + { + get; + set; + } + + /// + /// The mapping type. + /// + [DataMember(Name = "mappingType")] + public String MappingType + { + get; + set; + } + + /// + /// The local path. + /// + [DataMember(Name = "localPath")] + public String LocalPath + { + get; + set; + } + } + + /// + /// Represents a workspace mapping. + /// + [DataContract] + public class BuildWorkspace + { + /// + /// The list of workspace mapping entries. + /// + public List Mappings + { + get + { + if (m_mappings == null) + { + m_mappings = new List(); + } + return m_mappings; + } + } + + [DataMember(Name = "mappings")] + private List m_mappings; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Change.cs b/src/Sdk/BuildWebApi/Api/Contracts/Change.cs new file mode 100644 index 00000000000..7035e37f28a --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Change.cs @@ -0,0 +1,114 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a change associated with a build. + /// + [DataContract] + public class Change : BaseSecuredObject + { + public Change() + : this(null) + { + } + + internal Change( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The identifier for the change. For a commit, this would be the SHA1. For a TFVC changeset, this would be the changeset ID. + /// + [DataMember(EmitDefaultValue = false)] + public String Id + { + get; + set; + } + + /// + /// The description of the change. This might be a commit message or changeset description. + /// + [DataMember(EmitDefaultValue = false)] + public String Message + { + get; + set; + } + + /// + /// The type of change. "commit", "changeset", etc. + /// + [DataMember(EmitDefaultValue = false)] + public String Type + { + get; + set; + } + + /// + /// The author of the change. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef Author + { + get; + set; + } + + /// + /// The timestamp for the change. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? Timestamp + { + get; + set; + } + + /// + /// The location of the full representation of the resource. 
+ /// + [DataMember(EmitDefaultValue = false)] + public Uri Location + { + get; + set; + } + + /// + /// Indicates whether the message was truncated. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean MessageTruncated + { + get; + set; + } + + /// + /// The location of a user-friendly representation of the resource. + /// + [DataMember(EmitDefaultValue = false)] + public Uri DisplayUri + { + get; + set; + } + + /// + /// The person or process that pushed the change. + /// + [DataMember(EmitDefaultValue = false)] + public String Pusher + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/DefinitionReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/DefinitionReference.cs new file mode 100644 index 00000000000..b817b231155 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/DefinitionReference.cs @@ -0,0 +1,195 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Core.WebApi; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a reference to a definition. + /// + [DataContract] + [KnownType(typeof(BuildDefinition))] + [KnownType(typeof(BuildDefinitionReference))] + [JsonConverter(typeof(DefinitionReferenceJsonConverter))] +#pragma warning disable 618 + public class DefinitionReference : ShallowReference, ISecuredObject +#pragma warning restore 618 + { + /// + /// The ID of the referenced definition. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public new Int32 Id + { + get + { + return base.Id; + } + set + { + base.Id = value; + } + } + + /// + /// The name of the referenced definition. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public new String Name + { + get + { + return base.Name; + } + set + { + base.Name = value; + } + } + + /// + /// The REST URL of the definition. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public new String Url + { + get + { + return base.Url; + } + set + { + base.Url = value; + } + } + + /// + /// The definition's URI. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Uri Uri + { + get; + set; + } + + /// + /// The folder path of the definition. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Path + { + get; + set; + } + + /// + /// The type of the definition. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public DefinitionType Type + { + get; + set; + } + + /// + /// A value that indicates whether builds can be queued against this definition. + /// + [DataMember(IsRequired = false, EmitDefaultValue = true)] + public DefinitionQueueStatus QueueStatus + { + get; + set; + } + + /// + /// The definition revision number. + /// + [DataMember(EmitDefaultValue = false)] + public Int32? Revision + { + get; + set; + } + + /// + /// The date this version of the definition was created. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime CreatedDate + { + get; + set; + } + + /// + /// A reference to the project. 
+ /// + [DataMember(EmitDefaultValue = false, Order = 1000)] + public TeamProjectReference Project + { + get; + set; + } + + #region ISecuredObject implementation + + Guid ISecuredObject.NamespaceId => Security.BuildNamespaceId; + + Int32 ISecuredObject.RequiredPermissions => m_requiredPermissions; + + String ISecuredObject.GetToken() + { + if (!String.IsNullOrEmpty(m_nestingSecurityToken)) + { + return m_nestingSecurityToken; + } + + return GetToken(this.Project, this.Path, this.Id); + } + + internal void SetRequiredPermissions( + Int32 newValue) + { + m_requiredPermissions = newValue; + } + + internal void SetNestingSecurityToken( + String tokenValue) + { + // For anything more detailed than a DefinitionReference, + // we don't let you use a nesting security token. + if (this is BuildDefinitionReference) + { + // Debug.Fail("Nesting security tokens is not allowed for anything more detailed than a DefinitionReference"); + m_nestingSecurityToken = String.Empty; + return; + } + + m_nestingSecurityToken = tokenValue; + } + + internal static String GetToken( + TeamProjectReference project, + String path, + Int32 definitionId) + { + return GetToken(project?.Id, path, definitionId); + } + + internal static String GetToken( + Guid? projectId, + String path, + Int32 definitionId) + { + return String.Concat(projectId?.ToString("D") ?? String.Empty, Security.GetSecurityTokenPath(path ?? String.Empty), definitionId); + } + + private Int32 m_requiredPermissions = BuildPermissions.ViewBuildDefinition; + private String m_nestingSecurityToken = String.Empty; + #endregion + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/DefinitionResourceReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/DefinitionResourceReference.cs new file mode 100644 index 00000000000..f7f8c075174 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/DefinitionResourceReference.cs @@ -0,0 +1,61 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + [DataContract] + public class DefinitionResourceReference : BaseSecuredObject + { + public DefinitionResourceReference() + : this(null) + { + } + + internal DefinitionResourceReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// A friendly name for the resource. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// The type of the resource. + /// + [DataMember(EmitDefaultValue = false)] + public String Type + { + get; + set; + } + + /// + /// The id of the resource. + /// + [DataMember(EmitDefaultValue = false)] + public String Id + { + get; + set; + } + + /// + /// Indicates whether the resource is authorized for use. + /// + [DataMember] + public Boolean Authorized + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Demand.cs b/src/Sdk/BuildWebApi/Api/Contracts/Demand.cs new file mode 100644 index 00000000000..2437b417646 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Demand.cs @@ -0,0 +1,117 @@ +using System; +using System.Text.RegularExpressions; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a demand used by a definition or build. 
+ /// + [JsonConverter(typeof(DemandJsonConverter))] + public abstract class Demand : BaseSecuredObject + { + protected Demand( + String name, + String value) + : this(name, value, null) + { + } + + protected Demand( + String name, + String value, + ISecuredObject securedObject) + : base(securedObject) + { + ArgumentUtility.CheckStringForNullOrEmpty(name, "name"); + this.Name = name; + this.Value = value; + } + + /// + /// The name of the capability referenced by the demand. + /// + public String Name + { + get; + private set; + } + + /// + /// The demanded value. + /// + public String Value + { + get; + private set; + } + + public override sealed Boolean Equals(Object obj) + { + Demand demand = obj as Demand; + return demand != null && demand.ToString().Equals(this.ToString(), StringComparison.OrdinalIgnoreCase); + } + + public override sealed Int32 GetHashCode() + { + return this.ToString().ToUpperInvariant().GetHashCode(); + } + + public override sealed String ToString() + { + return GetExpression(); + } + + /// + /// Clones this object. + /// + /// + public abstract Demand Clone(); + + protected abstract String GetExpression(); + + /// + /// Parses a string into a Demand instance. + /// + /// + /// + /// + public static Boolean TryParse( + String input, + out Demand demand) + { + demand = null; + + Match match = s_demandRegex.Match(input); + if (!match.Success) + { + return false; + } + + String name = match.Groups["name"].Value; + String opcode = match.Groups["opcode"].Value; + String value = match.Groups["value"].Value; + + if (String.IsNullOrEmpty(opcode)) + { + demand = new DemandExists(name); + } + else + { + switch (opcode) + { + case "equals": + demand = new DemandEquals(name, value); + break; + } + } + + return demand != null; + } + + private static readonly Regex s_demandRegex = new Regex(@"^(?[^ ]+)([ ]+\-(?[^ ]+)[ ]+(?.*))?$", + RegexOptions.Compiled); + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/DemandEquals.cs b/src/Sdk/BuildWebApi/Api/Contracts/DemandEquals.cs new file mode 100644 index 00000000000..14ef4dd1a06 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/DemandEquals.cs @@ -0,0 +1,46 @@ +using System; +using System.Globalization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents an "equals" demand. + /// + /// + /// This demand is satisfied when the value of the named capability matches the value stored in the demand. + /// + public sealed class DemandEquals : Demand + { + public DemandEquals( + String name, + String value) + : this(name, value, null) + { + } + + public DemandEquals( + String name, + String value, + ISecuredObject securedObject) + : base(name, value, securedObject) + { + ArgumentUtility.CheckStringForNullOrEmpty(value, "value"); + } + + /// + /// Clones this object. + /// + /// + public override Demand Clone() + { + return new DemandEquals(this.Name, this.Value); + } + + protected override String GetExpression() + { + return String.Format(CultureInfo.InvariantCulture, "{0} -equals {1}", this.Name, this.Value); + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/DemandExists.cs b/src/Sdk/BuildWebApi/Api/Contracts/DemandExists.cs new file mode 100644 index 00000000000..35164ad9362 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/DemandExists.cs @@ -0,0 +1,41 @@ +using System; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents an "exists" demand. 
+ /// + /// + /// This demand is satisfied as long as the named capability exists, regardless of its value. + /// + public sealed class DemandExists : Demand + { + public DemandExists( + String name) + : this(name, null) + { + } + + public DemandExists( + String name, + ISecuredObject securedObject) + : base(name, null, securedObject) + { + } + + /// + /// Clones this object. + /// + /// + public override Demand Clone() + { + return new DemandExists(this.Name); + } + + protected override String GetExpression() + { + return this.Name; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/DemandJsonConverter.cs b/src/Sdk/BuildWebApi/Api/Contracts/DemandJsonConverter.cs new file mode 100644 index 00000000000..d21f3f46b56 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/DemandJsonConverter.cs @@ -0,0 +1,46 @@ +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using System; +using System.Reflection; + +namespace GitHub.Build.WebApi +{ + internal sealed class DemandJsonConverter : VssSecureJsonConverter + { + public override Boolean CanConvert(Type objectType) + { + return typeof(Demand).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (existingValue == null && reader.TokenType == JsonToken.String) + { + Demand demand; + if (Demand.TryParse((String)reader.Value, out demand)) + { + existingValue = demand; + } + } + + return existingValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + base.WriteJson(writer, value, serializer); + + if (value != null) + { + writer.WriteValue(value.ToString()); + } + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Dependency.cs b/src/Sdk/BuildWebApi/Api/Contracts/Dependency.cs new file mode 100644 index 00000000000..d4dadfc5e6a --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Dependency.cs @@ -0,0 +1,43 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a dependency. + /// + [DataContract] + public class Dependency : BaseSecuredObject + { + public Dependency() + { + } + + internal Dependency( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The scope. This names the object referenced by the dependency. + /// + [DataMember] + public String Scope + { + get; + set; + } + + /// + /// The event. The dependency is satisfied when the referenced object emits this event. 
+ /// + [DataMember] + public String Event + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Deprecated/BuildDefinition3_2.cs b/src/Sdk/BuildWebApi/Api/Contracts/Deprecated/BuildDefinition3_2.cs new file mode 100644 index 00000000000..c0073b4a98e --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Deprecated/BuildDefinition3_2.cs @@ -0,0 +1,441 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Common.Contracts; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi.Internals +{ + /// + /// For back-compat with extensions that use the old Steps format instead of Process and Phases + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class BuildDefinition3_2 : BuildDefinitionReference3_2 + { + public BuildDefinition3_2() + { + this.JobAuthorizationScope = BuildAuthorizationScope.ProjectCollection; + } + + /// + /// The build number format + /// + [DataMember(EmitDefaultValue = false)] + public String BuildNumberFormat + { + get; + set; + } + + /// + /// The comment entered when saving the definition + /// + [DataMember(EmitDefaultValue = false)] + public String Comment + { + get; + set; + } + + /// + /// The description + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + /// + /// The drop location for the definition + /// + [DataMember(EmitDefaultValue = false)] + public String DropLocation + { + get; + set; + } + + /// + /// The job authorization scope for builds which are queued against this definition + /// + [DataMember] + public BuildAuthorizationScope JobAuthorizationScope + { + get; + set; + } + + /// + /// The job execution timeout in minutes for builds which are queued against this definition + /// + [DataMember(EmitDefaultValue = false)] + public Int32 JobTimeoutInMinutes + { + get; + set; + } + + /// + /// The job cancel timeout in minutes for builds which are cancelled by user for this definition + /// + [DataMember(EmitDefaultValue = false)] + public Int32 JobCancelTimeoutInMinutes + { + get; + set; + } + + /// + /// Indicates whether badges are enabled for this definition + /// + [DataMember(EmitDefaultValue = false)] + public Boolean BadgeEnabled + { + get; + set; + } + + /// + /// The list of steps for this definition. + /// + public List Steps + { + get + { + if (m_steps == null) + { + m_steps = new List(); + } + return m_steps; + } + } + + /// + /// Build options + /// + public List Options + { + get + { + if (m_options == null) + { + m_options = new List(); + } + return m_options; + } + } + + /// + /// The repository + /// + [DataMember(EmitDefaultValue = false)] + public BuildRepository Repository + { + get; + set; + } + + /// + /// Process Parameters + /// + [DataMember(EmitDefaultValue = false)] + public ProcessParameters ProcessParameters + { + get; + set; + } + + /// + /// The triggers + /// + public List Triggers + { + get + { + if (m_triggers == null) + { + m_triggers = new List(); + } + return m_triggers; + } + } + + /// + /// The variables. + /// + public IDictionary Variables + { + get + { + if (m_variables == null) + { + m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_variables; + } + } + + /// + /// The demands. + /// + public List Demands + { + get + { + if (m_demands == null) + { + m_demands = new List(); + } + return m_demands; + } + } + + /// + /// The retention rules. 
+ /// + public List RetentionRules + { + get + { + if (m_retentionRules == null) + { + m_retentionRules = new List(); + } + return m_retentionRules; + } + } + + /// + /// A collection of properties which may be used to extend the storage fields available + /// for a given definition. + /// + public PropertiesCollection Properties + { + get + { + if (m_properties == null) + { + m_properties = new PropertiesCollection(); + } + return m_properties; + } + internal set + { + m_properties = value; + } + } + + /// + /// A collection of tags associated with the build definition. + /// + public List Tags + { + get + { + if (m_tags == null) + { + m_tags = new List(); + } + return m_tags; + } + } + + /// + /// The latest build for this definition. + /// + public Build LatestBuild + { + get + { + return m_latestBuild; + } + internal set + { + m_latestBuild = value; + } + } + + /// + /// The latest completed build for this definition. + /// + public Build LatestCompletedBuild + { + get + { + return m_latestCompletedBuild; + } + internal set + { + m_latestCompletedBuild = value; + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedOptions, ref m_options, true); + SerializationHelper.Copy(ref m_serializedSteps, ref m_steps, true); + SerializationHelper.Copy(ref m_serializedTriggers, ref m_triggers, true); + SerializationHelper.Copy(ref m_serializedVariables, ref m_variables, StringComparer.OrdinalIgnoreCase, true); + SerializationHelper.Copy(ref m_serializedDemands, ref m_demands, true); + SerializationHelper.Copy(ref m_serializedRetentionRules, ref m_retentionRules, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_options, ref m_serializedOptions); + SerializationHelper.Copy(ref m_steps, ref m_serializedSteps); + SerializationHelper.Copy(ref m_triggers, ref m_serializedTriggers); + SerializationHelper.Copy(ref m_variables, ref m_serializedVariables, StringComparer.OrdinalIgnoreCase); + SerializationHelper.Copy(ref m_demands, ref m_serializedDemands); + SerializationHelper.Copy(ref m_retentionRules, ref m_serializedRetentionRules); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedSteps = null; + m_serializedOptions = null; + m_serializedTriggers = null; + m_serializedVariables = null; + m_serializedRetentionRules = null; + } + + [DataMember(Name = "Build", EmitDefaultValue = false)] + private List m_serializedSteps; + + [DataMember(Name = "Options", EmitDefaultValue = false)] + private List m_serializedOptions; + + [DataMember(Name = "Triggers", EmitDefaultValue = false)] + private List m_serializedTriggers; + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private IDictionary m_serializedVariables; + + [DataMember(Name = "Demands", EmitDefaultValue = false)] + private List m_serializedDemands; + + [DataMember(Name = "RetentionRules", EmitDefaultValue = false)] + private List m_serializedRetentionRules; + + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "Properties")] + private PropertiesCollection m_properties; + + [DataMember(EmitDefaultValue = false, Name = "Tags")] + private List m_tags; + + [DataMember(EmitDefaultValue = false, Name = "LatestBuild")] + private Build m_latestBuild; + + [DataMember(EmitDefaultValue = false, Name = "LatestCompletedBuild")] + private Build m_latestCompletedBuild; + + private List m_demands; + private List m_options; + private List m_triggers; 
+ private List m_retentionRules; + private List m_steps; + private IDictionary m_variables; + } + + internal static class BuildDefinition3_2Extensions + { + public static BuildDefinition ToBuildDefinition( + this BuildDefinition3_2 source) + { + if (source == null) + { + return null; + } + + var result = new BuildDefinition() + { + AuthoredBy = source.AuthoredBy, + BadgeEnabled = source.BadgeEnabled, + BuildNumberFormat = source.BuildNumberFormat, + Comment = source.Comment, + CreatedDate = source.CreatedDate, + DefinitionQuality = source.DefinitionQuality, + Description = source.Description, + DropLocation = source.DropLocation, + Id = source.Id, + JobAuthorizationScope = source.JobAuthorizationScope, + JobCancelTimeoutInMinutes = source.JobCancelTimeoutInMinutes, + JobTimeoutInMinutes = source.JobTimeoutInMinutes, + LatestBuild = source.LatestBuild, + LatestCompletedBuild = source.LatestCompletedBuild, + Name = source.Name, + ParentDefinition = source.ParentDefinition, + Path = source.Path, + ProcessParameters = source.ProcessParameters, + Project = source.Project, + Queue = source.Queue, + QueueStatus = source.QueueStatus, + Repository = source.Repository, + Revision = source.Revision, + Type = source.Type, + Uri = source.Uri, + Url = source.Url + }; + + if (source.Demands.Count > 0) + { + result.Demands.AddRange(source.Demands); + } + + if (source.Metrics.Count > 0) + { + result.Metrics.AddRange(source.Metrics); + } + + if (source.Options.Count > 0) + { + result.Options.AddRange(source.Options); + } + + var process = new DesignerProcess(); + result.Process = process; + + var phase = new Phase(); + process.Phases.Add(phase); + + if (source.Steps.Count > 0) + { + phase.Steps.AddRange(source.Steps); + } + + foreach (var property in source.Properties) + { + result.Properties.Add(property.Key, property.Value); + } + + if (source.RetentionRules.Count > 0) + { + result.RetentionRules.AddRange(source.RetentionRules); + } + + if (source.Tags.Count > 0) + { + result.Tags.AddRange(source.Tags); + } + + if (source.Triggers.Count > 0) + { + result.Triggers.AddRange(source.Triggers); + } + + foreach (var variablePair in source.Variables) + { + result.Variables.Add(variablePair.Key, variablePair.Value); + } + + return result; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Deprecated/BuildDefinitionReference3_2.cs b/src/Sdk/BuildWebApi/Api/Contracts/Deprecated/BuildDefinitionReference3_2.cs new file mode 100644 index 00000000000..b000c2297d7 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Deprecated/BuildDefinitionReference3_2.cs @@ -0,0 +1,127 @@ +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// For back-compat with extensions that use the old Steps format instead of Process and Phases + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class BuildDefinitionReference3_2 : DefinitionReference + { + public BuildDefinitionReference3_2() + { + Type = DefinitionType.Build; + QueueStatus = DefinitionQueueStatus.Enabled; + } + + /// + /// The quality of the definition document (draft, etc.) + /// + [DataMember(EmitDefaultValue = false, Name = "Quality")] + public DefinitionQuality? DefinitionQuality + { + get; + set; + } + + /// + /// The author of the definition. 
+ /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef AuthoredBy + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// A reference to the definition that this definition is a draft of, if this is a draft definition. + /// + [DataMember(EmitDefaultValue = false, Name = "draftOf")] + public DefinitionReference ParentDefinition + { + get; + set; + } + + /// + /// The list of drafts associated with this definition, if this is not a draft definition. + /// + [DataMember(EmitDefaultValue = false)] + public List Drafts + { + get + { + return m_drafts ?? (m_drafts = new List()); + } + internal set + { + m_drafts = value; + } + } + + /// + /// The default queue for builds run against this definition. + /// + [DataMember(EmitDefaultValue = false)] + public AgentPoolQueue Queue + { + get; + set; + } + + /// + /// The metrics for this definition. + /// + public List Metrics + { + get + { + return m_metrics ?? (m_metrics = new List()); + } + internal set + { + m_metrics = value; + } + } + + /// + /// The links to other objects related to this object. + /// + public ReferenceLinks Links => m_links ?? (m_links = new ReferenceLinks()); + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedMetrics, ref m_metrics, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_metrics, ref m_serializedMetrics); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedMetrics = null; + } + + [DataMember(Name = "Metrics", EmitDefaultValue = false)] + private List m_serializedMetrics; + + [DataMember(Name = "_links", EmitDefaultValue = false)] + private ReferenceLinks m_links; + + private List m_metrics; + + private List m_drafts; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Deprecated/BuildDefinitionTemplate3_2.cs b/src/Sdk/BuildWebApi/Api/Contracts/Deprecated/BuildDefinitionTemplate3_2.cs new file mode 100644 index 00000000000..313e38daef1 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Deprecated/BuildDefinitionTemplate3_2.cs @@ -0,0 +1,123 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Internals +{ + /// + /// For back-compat with extensions that use the old Steps format instead of Process and Phases + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class BuildDefinitionTemplate3_2 + { + + public BuildDefinitionTemplate3_2() + { + Category = "Custom"; + } + + [DataMember(IsRequired = true)] + public String Id + { + get; + set; + } + + [DataMember(IsRequired = true)] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = true)] + public Boolean CanDelete + { + get; + set; + } + + [DataMember(EmitDefaultValue = true)] + public String Category + { + get; + set; + } + + [DataMember(EmitDefaultValue = true)] + public String DefaultHostedQueue + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Guid IconTaskId + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public BuildDefinition3_2 Template + { + get; + set; + } + + public IDictionary Icons + { + get + { + if (m_icons == null) + { + m_icons = new Dictionary(StringComparer.Ordinal); + } + return m_icons; + } + } + + [DataMember(EmitDefaultValue = 
false, Name = "Icons")] + private Dictionary m_icons; + } + + internal static class BuildDefinitionTemplate3_2Extensions + { + public static BuildDefinitionTemplate ToBuildDefinitionTemplate( + this BuildDefinitionTemplate3_2 source) + { + if (source == null) + { + return null; + } + + var result = new BuildDefinitionTemplate() + { + CanDelete = source.CanDelete, + Category = source.Category, + DefaultHostedQueue = source.DefaultHostedQueue, + Description = source.Description, + IconTaskId = source.IconTaskId, + Id = source.Id, + Name = source.Name, + Template = source.Template.ToBuildDefinition() + }; + + foreach (var iconPair in source.Icons) + { + result.Icons.Add(iconPair.Key, iconPair.Value); + } + + return result; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/DesignerProcess.cs b/src/Sdk/BuildWebApi/Api/Contracts/DesignerProcess.cs new file mode 100644 index 00000000000..4ca66945177 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/DesignerProcess.cs @@ -0,0 +1,48 @@ +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a build process supported by the build definition designer. + /// + [DataContract] + public class DesignerProcess : BuildProcess + { + public DesignerProcess() + :this(null) + { + } + + internal DesignerProcess( + ISecuredObject securedObject) + : base(ProcessType.Designer, securedObject) + { + } + + /// + /// The list of phases. + /// + public List Phases + { + get + { + if (m_phases == null) + { + m_phases = new List(); + } + return m_phases; + } + } + + [DataMember(Name = "Phases", EmitDefaultValue = false)] + private List m_phases; + + /// + /// The target for the build process. + /// + [DataMember(EmitDefaultValue = false)] + public DesignerProcessTarget Target { get; set; } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/DesignerProcessTarget.cs b/src/Sdk/BuildWebApi/Api/Contracts/DesignerProcessTarget.cs new file mode 100644 index 00000000000..293cda6ff5c --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/DesignerProcessTarget.cs @@ -0,0 +1,27 @@ +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents the target for the build process. + /// + [DataContract] + public class DesignerProcessTarget : BaseSecuredObject + { + public DesignerProcessTarget() + { + } + + public DesignerProcessTarget(ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// Agent specification for the build process. 
+ /// + [DataMember(EmitDefaultValue = false)] + public AgentSpecification AgentSpecification { get; set; } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/DockerProcess.cs b/src/Sdk/BuildWebApi/Api/Contracts/DockerProcess.cs new file mode 100644 index 00000000000..7e8febc8154 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/DockerProcess.cs @@ -0,0 +1,23 @@ +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + [DataContract] + public class DockerProcess : BuildProcess + { + public DockerProcess() + : this(null) + { + } + + internal DockerProcess( + ISecuredObject securedObject) + : base(ProcessType.Docker, securedObject) + { + } + + [DataMember(EmitDefaultValue = false)] + public DockerProcessTarget Target { get; set; } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/DockerProcessTarget.cs b/src/Sdk/BuildWebApi/Api/Contracts/DockerProcessTarget.cs new file mode 100644 index 00000000000..68ba4985ff9 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/DockerProcessTarget.cs @@ -0,0 +1,21 @@ +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents the target for the docker build process. + /// + [DataContract] + public class DockerProcessTarget: DesignerProcessTarget + { + public DockerProcessTarget() + { + } + + public DockerProcessTarget(ISecuredObject securedObject) + : base(securedObject) + { + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Folder.cs b/src/Sdk/BuildWebApi/Api/Contracts/Folder.cs new file mode 100644 index 00000000000..a1d330aa4cf --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Folder.cs @@ -0,0 +1,119 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Core.WebApi; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a folder that contains build definitions. + /// + [DataContract] + public class Folder : ISecuredObject + { + public Folder() + { + } + + /// + /// The full path. + /// + [DataMember(EmitDefaultValue = false)] + public String Path + { + get; + set; + } + + /// + /// The description. + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + /// + /// The date the folder was created. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime CreatedOn + { + get; + set; + } + + /// + /// The process or person who created the folder. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef CreatedBy + { + get; + set; + } + + /// + /// The date the folder was last changed. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? LastChangedDate + { + get; + set; + } + + /// + /// The process or person that last changed the folder. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef LastChangedBy + { + get; + set; + } + + /// + /// The project. + /// + [DataMember(EmitDefaultValue = false)] + public TeamProjectReference Project + { + get; + set; + } + + #region ISecuredObject implementation + + // We don't have folder-specific permissions. Folders are secured by the project. 
+ + public Guid NamespaceId + { + get + { + ArgumentUtility.CheckForNull(Project, nameof(Project)); + return ((ISecuredObject)Project).NamespaceId; + } + } + + public Int32 RequiredPermissions + { + get + { + ArgumentUtility.CheckForNull(Project, nameof(Project)); + return ((ISecuredObject)Project).RequiredPermissions; + } + } + + public String GetToken() + { + ArgumentUtility.CheckForNull(Project, nameof(Project)); + return ((ISecuredObject)Project).GetToken(); + } + + #endregion + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Forks.cs b/src/Sdk/BuildWebApi/Api/Contracts/Forks.cs new file mode 100644 index 00000000000..3c420bb6e53 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Forks.cs @@ -0,0 +1,43 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents the ability to build forks of the selected repository. + /// + [DataContract] + public sealed class Forks : BaseSecuredObject + { + public Forks() + { + } + + internal Forks( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// Indicates whether the trigger should queue builds for forks of the selected repository. + /// + [DataMember] + public Boolean Enabled + { + get; + set; + } + + /// + /// Indicates whether a build should use secrets when building forks of the selected repository. + /// + [DataMember] + public Boolean AllowSecrets + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Issue.cs b/src/Sdk/BuildWebApi/Api/Contracts/Issue.cs new file mode 100644 index 00000000000..1e4d5c905b7 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Issue.cs @@ -0,0 +1,98 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents an issue (error, warning) associated with a build. + /// + [DataContract] + public class Issue : BaseSecuredObject + { + public Issue() + { + } + + internal Issue( + ISecuredObject securedObject) + : base(securedObject) + { + } + + private Issue( + Issue issueToBeCloned) + : base(issueToBeCloned) + { + this.Type = issueToBeCloned.Type; + this.Category = issueToBeCloned.Category; + this.Message = issueToBeCloned.Message; + + if (issueToBeCloned.m_data != null) + { + foreach (var item in issueToBeCloned.m_data) + { + this.Data.Add(item); + } + } + } + + /// + /// The type (error, warning) of the issue. + /// + [DataMember(Order = 1)] + public IssueType Type + { + get; + set; + } + + /// + /// The category. + /// + [DataMember(Order = 2)] + public String Category + { + get; + set; + } + + /// + /// A description of the issue. + /// + [DataMember(Order = 3)] + public String Message + { + get; + set; + } + + /// + /// A dictionary containing details about the issue. + /// + public IDictionary Data + { + get + { + if (m_data == null) + { + m_data = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_data; + } + } + + /// + /// Clones this object. 
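
Clone (below) copies Type, Category and Message and re-adds each Data entry, so the copy never shares the backing dictionary. A hedged sketch; string values and the IssueType.Warning member are assumed, since the generic arguments and the enum definition are elided in this diff:

    var issue = new Issue
    {
        Type = IssueType.Warning,          // enum member assumed from the "(error, warning)" summary above
        Category = "General",
        Message = "An input used by this step is deprecated"
    };
    issue.Data["stepNumber"] = "3";        // Data keys are compared with OrdinalIgnoreCase

    Issue copy = issue.Clone();
    copy.Data["STEPNUMBER"] = "4";         // resolves to the same key as "stepNumber", but only in the copy
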
+ /// + /// + public Issue Clone() + { + return new Issue(this); + } + + [DataMember(Name = "Data", EmitDefaultValue = false, Order = 4)] + private IDictionary m_data; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/JustInTimeProcess.cs b/src/Sdk/BuildWebApi/Api/Contracts/JustInTimeProcess.cs new file mode 100644 index 00000000000..09c9f65a7ba --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/JustInTimeProcess.cs @@ -0,0 +1,20 @@ +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + [DataContract] + public class JustInTimeProcess : BuildProcess + { + public JustInTimeProcess() + : this(null) + { + } + + internal JustInTimeProcess( + ISecuredObject securedObject) + : base(ProcessType.JustInTime, securedObject) + { + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/MultipleAgentExecutionOptions.cs b/src/Sdk/BuildWebApi/Api/Contracts/MultipleAgentExecutionOptions.cs new file mode 100644 index 00000000000..5c4627142ef --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/MultipleAgentExecutionOptions.cs @@ -0,0 +1,46 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents options for running a phase against multiple agents. + /// + [DataContract] + public class MultipleAgentExecutionOptions : AgentTargetExecutionOptions + { + public MultipleAgentExecutionOptions() + : this(null) + { + } + + internal MultipleAgentExecutionOptions( + ISecuredObject securedObject) + : base(AgentTargetExecutionType.MultipleAgents, securedObject) + { + MaxConcurrency = 1; + } + + /// + /// The maximum number of agents to use simultaneously. + /// + [DataMember(EmitDefaultValue=true)] + [DefaultValue(1)] + public Int32 MaxConcurrency { + get; + set; + } + + /// + /// Indicates whether failure on one agent should prevent the phase from running on other agents. + /// + [DataMember(EmitDefaultValue = true)] + public Boolean ContinueOnError + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Phase.cs b/src/Sdk/BuildWebApi/Api/Contracts/Phase.cs new file mode 100644 index 00000000000..7440edc6ce8 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Phase.cs @@ -0,0 +1,186 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a phase of a build definition. + /// + [DataContract] + public class Phase : BaseSecuredObject + { + public Phase() + { + } + + internal Phase( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The name of the phase. + /// + [DataMember] + public String Name + { + get; + set; + } + + /// + /// The unique ref name of the phase. + /// + [DataMember] + public String RefName + { + get; + set; + } + + /// + /// The list of steps run by the phase. + /// + public List Steps + { + get + { + if (m_steps == null) + { + m_steps = new List(); + } + + return m_steps; + } + set + { + m_steps = value; + } + } + + /// + /// The list of variables defined on the phase. + /// + public IDictionary Variables + { + get + { + if (m_variables == null) + { + m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_variables; + } + set + { + m_variables = new Dictionary(value, StringComparer.OrdinalIgnoreCase); + } + } + + /// + /// The list of dependencies for this phase. 
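
Phases like this one compose into the DesignerProcess defined earlier in this diff: each phase carries a name, an optional condition, and a target. A hedged configuration sketch; the condition expression is illustrative, and ServerTarget comes from a later file in this same patch:

    var phase = new Phase
    {
        Name = "Validate",
        RefName = "Phase_1",
        Condition = "succeeded()",       // evaluated only after all dependencies are satisfied
        Target = new ServerTarget(),     // runs on the server rather than an agent
        JobTimeoutInMinutes = 60
    };

    var process = new DesignerProcess();
    process.Phases.Add(phase);           // Phases is created lazily on first access
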
+ /// + public List Dependencies + { + get + { + if (m_dependencies == null) + { + m_dependencies = new List(); + } + + return m_dependencies; + } + set + { + m_dependencies = value; + } + } + + /// + /// The condition that must be true for this phase to execute. + /// + /// + /// The condition is evaluated after all dependencies are satisfied. + /// + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public String Condition + { + get; + set; + } + + /// + /// The target (agent, server, etc.) for this phase. + /// + [DataMember(EmitDefaultValue = false)] + public PhaseTarget Target + { + get; + set; + } + + /// + /// The job authorization scope for builds queued against this definition. + /// + [DataMember] + public BuildAuthorizationScope JobAuthorizationScope + { + get; + set; + } + + /// + /// The job execution timeout, in minutes, for builds queued against this definition. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 JobTimeoutInMinutes + { + get; + set; + } + + /// + /// The cancellation timeout, in minutes, for builds queued against this definition. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 JobCancelTimeoutInMinutes + { + get; + set; + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedDependencies, ref m_dependencies, true); + SerializationHelper.Copy(ref m_serializedSteps, ref m_steps, true); + SerializationHelper.Copy(ref m_serializedVariables, ref m_variables, StringComparer.OrdinalIgnoreCase, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_dependencies, ref m_serializedDependencies); + SerializationHelper.Copy(ref m_steps, ref m_serializedSteps); + SerializationHelper.Copy(ref m_variables, ref m_serializedVariables, StringComparer.OrdinalIgnoreCase); + } + + [DataMember(Name = "Dependencies", EmitDefaultValue = false)] + private List m_serializedDependencies; + + [DataMember(Name = "Steps", EmitDefaultValue = false)] + private List m_serializedSteps; + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private IDictionary m_serializedVariables; + + private List m_dependencies; + private List m_steps; + private IDictionary m_variables; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/PhaseTarget.cs b/src/Sdk/BuildWebApi/Api/Contracts/PhaseTarget.cs new file mode 100644 index 00000000000..4dcd45bb3d8 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/PhaseTarget.cs @@ -0,0 +1,46 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents the target of a phase. + /// + [DataContract] + [KnownType(typeof(AgentPoolQueueTarget))] + [KnownType(typeof(ServerTarget))] + [JsonConverter(typeof(PhaseTargetJsonConverter))] + public abstract class PhaseTarget : BaseSecuredObject + { + protected PhaseTarget() + { + } + + internal protected PhaseTarget( + Int32 type, + ISecuredObject securedObject) + : base(securedObject) + { + this.Type = type; + } + + protected PhaseTarget(Int32 type) + { + this.Type = type; + } + + /// + /// The type of the target. + /// + /// + /// for valid phase target types. 
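
The integer Type property (below) is the discriminator that PhaseTargetJsonConverter (next file) uses to pick a concrete subclass during deserialization: the Agent value maps to AgentPoolQueueTarget and the Server value to ServerTarget. A rough round-trip sketch using Newtonsoft.Json, which the JsonConverter attribute on this class already ties in:

    PhaseTarget original = new ServerTarget();
    string json = Newtonsoft.Json.JsonConvert.SerializeObject(original);
    // The payload carries the numeric "type" discriminator (the PhaseTargetType values are defined elsewhere in the patch).
    PhaseTarget restored = Newtonsoft.Json.JsonConvert.DeserializeObject<PhaseTarget>(json);
    // restored is a ServerTarget: the converter switches on the Type value and news up the matching subclass.
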
+ /// + [DataMember] + public Int32 Type { + get; + private set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/PhaseTargetJsonConverter.cs b/src/Sdk/BuildWebApi/Api/Contracts/PhaseTargetJsonConverter.cs new file mode 100644 index 00000000000..25e8bd117e2 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/PhaseTargetJsonConverter.cs @@ -0,0 +1,36 @@ +using System; + +namespace GitHub.Build.WebApi +{ + internal sealed class PhaseTargetJsonConverter : TypePropertyJsonConverter + { + protected override PhaseTarget GetInstance(Type objectType) + { + if (objectType == typeof(AgentPoolQueueTarget)) + { + return new AgentPoolQueueTarget(); + } + else if (objectType == typeof(ServerTarget)) + { + return new ServerTarget(); + } + else + { + return base.GetInstance(objectType); + } + } + + protected override PhaseTarget GetInstance(Int32 targetType) + { + switch (targetType) + { + case PhaseTargetType.Agent: + return new AgentPoolQueueTarget(); + case PhaseTargetType.Server: + return new ServerTarget(); + default: + return null; + } + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/PropertyValue.cs b/src/Sdk/BuildWebApi/Api/Contracts/PropertyValue.cs new file mode 100644 index 00000000000..060bf3eeee6 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/PropertyValue.cs @@ -0,0 +1,37 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + // TODO: remove this before dev16 ships. leaving it in for the dev15 cycle to avoid any issues + [Obsolete("This contract is not used by any product code")] + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public sealed class PropertyValue + { + /// + /// Name in the name value mapping + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String PropertyName { get; set; } + + /// + /// Value in the name value mapping + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Object Value { get; set; } + + /// + /// Guid of identity that changed this property value + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid? ChangedBy { get; set; } + + /// + /// The date this property value was changed + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public DateTime? ChangedDate { get; set; } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/PullRequest.cs b/src/Sdk/BuildWebApi/Api/Contracts/PullRequest.cs new file mode 100644 index 00000000000..8929a7538be --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/PullRequest.cs @@ -0,0 +1,91 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a pull request object. These are retrieved from Source Providers. + /// + [DataContract] + public class PullRequest: BaseSecuredObject + { + public PullRequest() + { + this.Links = new ReferenceLinks(); + } + + internal PullRequest( + ISecuredObject securedObject) + : base(securedObject) + { + this.Links = new ReferenceLinks(); + } + + /// + /// The name of the provider this pull request is associated with. + /// + [DataMember] + public String ProviderName { get; set; } + + /// + /// Unique identifier for the pull request + /// + [DataMember(EmitDefaultValue = false)] + public String Id { get; set; } + + /// + /// Title of the pull request. + /// + [DataMember(EmitDefaultValue = false)] + public String Title { get; set; } + + /// + /// Description for the pull request. 
+ /// + [DataMember(EmitDefaultValue = false)] + public String Description { get; set; } + + /// + /// Current state of the pull request, e.g. open, merged, closed, conflicts, etc. + /// + [DataMember(EmitDefaultValue = false)] + public String CurrentState { get; set; } + + /// + /// Author of the pull request. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef Author { get; set; } + + /// + /// Owner of the source repository of this pull request + /// + [DataMember(EmitDefaultValue = false)] + public String SourceRepositoryOwner { get; set; } + + /// + /// Source branch ref of this pull request + /// + [DataMember(EmitDefaultValue = false)] + public String SourceBranchRef { get; set; } + + /// + /// Owner of the target repository of this pull request + /// + [DataMember(EmitDefaultValue = false)] + public String TargetRepositoryOwner { get; set; } + + /// + /// Target branch ref of this pull request + /// + [DataMember(EmitDefaultValue = false)] + public String TargetBranchRef { get; set; } + + /// + /// The links to other objects related to this object. + /// + [DataMember(Name = "_links", EmitDefaultValue = false)] + public ReferenceLinks Links { get; set; } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/RepositoryWebhook.cs b/src/Sdk/BuildWebApi/Api/Contracts/RepositoryWebhook.cs new file mode 100644 index 00000000000..1dd5be4a46a --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/RepositoryWebhook.cs @@ -0,0 +1,68 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a repository's webhook returned from a source provider. + /// + [DataContract] + public class RepositoryWebhook + { + /// + /// The types of triggers the webhook was created for. + /// + public List Types + { + get + { + if (m_types == null) + { + m_types = new List(); + } + + return m_types; + } + set + { + m_types = value; + } + } + + /// + /// The friendly name of the repository. + /// + [DataMember(EmitDefaultValue = false)] + public String Name { get; set; } + + /// + /// The URL of the repository. + /// + [DataMember(EmitDefaultValue = false)] + public Uri Url { get; set; } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedTypes, ref m_types, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_types, ref m_serializedTypes); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedTypes = null; + } + + [DataMember(Name = nameof(Types), EmitDefaultValue = false)] + private List m_serializedTypes; + + private List m_types; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/RequestShallowReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/RequestShallowReference.cs new file mode 100644 index 00000000000..c32ea4c3985 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/RequestShallowReference.cs @@ -0,0 +1,30 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + // TODO: remove this before dev16 ships. 
leaving it in for the dev15 cycle to avoid any issues + [Obsolete("This contract is not used by any product code")] + [DataContract] + public class RequestReference + { + /// + /// Id of the resource + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int32 Id { get; set; } + + /// + /// Full http link to the resource + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Url { get; set; } + + /// + /// Name of the requestor + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public IdentityRef RequestedFor { get; set; } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/RetentionPolicy.cs b/src/Sdk/BuildWebApi/Api/Contracts/RetentionPolicy.cs new file mode 100644 index 00000000000..58d20bc206a --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/RetentionPolicy.cs @@ -0,0 +1,171 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a retention policy for a build definition. + /// + [DataContract] + public class RetentionPolicy : BaseSecuredObject + { + public RetentionPolicy() + : this(null) + { + } + + public RetentionPolicy( + ISecuredObject securedObject) + : base(securedObject) + { + DaysToKeep = 30; // default to 30 days + MinimumToKeep = 1; // default to 1 + DeleteBuildRecord = true; // default to set Deleted bit on build records + DeleteTestResults = false; // For old build definitions, it has to be false. This value in New Definitions will be handled in ts files. + } + + /// + /// The list of branches affected by the retention policy. + /// + public List Branches + { + get + { + if (m_branches == null) + { + m_branches = new List(); + } + + return m_branches; + } + internal set + { + m_branches = value; + } + } + + /// + /// The number of days to keep builds. + /// + [DataMember] + public Int32 DaysToKeep + { + get + { + return m_daysToKeep; + } + + set + { + if (value < 0) + { + m_daysToKeep = 0; + } + else + { + m_daysToKeep = value; + } + } + } + + /// + /// The minimum number of builds to keep. + /// + [DataMember] + public Int32 MinimumToKeep + { + get + { + return m_minimumToKeep; + } + set + { + if (value < 0) + { + m_minimumToKeep = 0; + } + else + { + m_minimumToKeep = value; + } + } + } + + /// + /// Indicates whether the build record itself should be deleted. + /// + [DataMember] + public Boolean DeleteBuildRecord + { + get; + set; + } + + /// + /// The list of artifacts to delete. + /// + public List ArtifactsToDelete + { + get + { + if (m_artifactsToDelete == null) + { + m_artifactsToDelete = new List(); + } + + return m_artifactsToDelete; + } + internal set + { + m_artifactsToDelete = value; + } + } + + // This list contains the types of artifacts to be deleted. + // These are different from ArtifactsToDelete because for certain artifacts giving user a choice for every single artifact can become cumbersome. + // e.g. artifacts in file share - user can choose to delete/keep all the artifacts in file share + /// + /// The list of types of artifacts to delete. + /// + public List ArtifactTypesToDelete + { + get + { + if (m_artifactTypesToDelete == null) + { + m_artifactTypesToDelete = new List(); + } + + return m_artifactTypesToDelete; + } + internal set + { + m_artifactTypesToDelete = value; + } + } + + /// + /// Indicates whether to delete test results associated with the build. 
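
The setters above clamp negative values to zero, and the constructor supplies the documented defaults (keep 30 days, keep at least 1 build, delete the build record, keep test results). A short sketch of that behavior (illustrative only; the branch filter syntax shown is assumed):

    var policy = new RetentionPolicy();            // DaysToKeep == 30, MinimumToKeep == 1, DeleteBuildRecord == true
    policy.DaysToKeep = -5;                        // negative values are clamped, so DaysToKeep is now 0
    policy.MinimumToKeep = 3;
    policy.Branches.Add("+refs/heads/main");       // Branches is created lazily on first access
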
+ /// + [DataMember] + public Boolean DeleteTestResults + { + get; + set; + } + + [DataMember(Name = "Branches", EmitDefaultValue = false)] + private List m_branches; + + [DataMember(Name = "Artifacts", EmitDefaultValue = false)] + private List m_artifactsToDelete; + + [DataMember(Name = "ArtifactTypesToDelete", EmitDefaultValue = false)] + private List m_artifactTypesToDelete; + + private Int32 m_daysToKeep; + private Int32 m_minimumToKeep; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Schedule.cs b/src/Sdk/BuildWebApi/Api/Contracts/Schedule.cs new file mode 100644 index 00000000000..df912f2047d --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Schedule.cs @@ -0,0 +1,89 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public sealed class Schedule : BaseSecuredObject + { + public Schedule() + { + } + + internal Schedule( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// Time zone of the build schedule (String representation of the time zone ID) + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String TimeZoneId { get; set; } + + /// + /// Local timezone hour to start + /// + [DataMember(IsRequired = false, EmitDefaultValue = true)] + public Int32 StartHours { get; set; } + + /// + /// Local timezone minute to start + /// + [DataMember(IsRequired = false, EmitDefaultValue = true)] + public Int32 StartMinutes { get; set; } + + /// + /// Days for a build (flags enum for days of the week) + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public ScheduleDays DaysToBuild { get; set; } + + //TODO: We should be able to remove the ScheduleJobId field in tbl_Definition + /// + /// The Job Id of the Scheduled job that will queue the scheduled build. + /// Since a single trigger can have multiple schedules and we want a single job + /// to process a single schedule (since each schedule has a list of branches + /// to build), the schedule itself needs to define the Job Id. + /// This value will be filled in when a definition is added or updated. The UI + /// does not provide it or use it. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid ScheduleJobId { get; set; } + + /// + /// Branches that the schedule affects + /// + public List BranchFilters + { + get + { + if (m_branchFilters == null) + { + m_branchFilters = new List(); + } + + return m_branchFilters; + } + internal set + { + m_branchFilters = value; + } + } + + /// + /// Flag to determine if this schedule should only build if the associated + /// source has been changed. + /// + [DataMember(IsRequired = false, EmitDefaultValue = true)] + public bool ScheduleOnlyWithChanges { get; set; } + + [DataMember(Name = "BranchFilters", EmitDefaultValue = false)] + private List m_branchFilters; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/ServerTarget.cs b/src/Sdk/BuildWebApi/Api/Contracts/ServerTarget.cs new file mode 100644 index 00000000000..6b218c828ef --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/ServerTarget.cs @@ -0,0 +1,33 @@ +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a phase target that runs on the server. 
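
The Schedule contract defined earlier in this hunk expresses a time-of-day trigger: a time zone, a start hour and minute, a ScheduleDays flags value, and a set of branch filters. A hedged configuration sketch; the ScheduleDays members and the filter syntax are assumed, since they are defined elsewhere:

    var schedule = new Schedule
    {
        TimeZoneId = "UTC",
        StartHours = 3,
        StartMinutes = 0,
        DaysToBuild = ScheduleDays.Monday | ScheduleDays.Friday,   // flags enum; member names assumed
        ScheduleOnlyWithChanges = true                              // skip the scheduled build when nothing changed
    };
    schedule.BranchFilters.Add("+refs/heads/main");
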
+ /// + [DataContract] + public class ServerTarget : PhaseTarget + { + public ServerTarget() + : base(PhaseTargetType.Server) + { + } + + internal ServerTarget( + ISecuredObject securedObject) + : base(PhaseTargetType.Server, securedObject) + { + } + + /// + /// The execution options. + /// + [DataMember] + public ServerTargetExecutionOptions ExecutionOptions + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/ServerTargetExecutionOptions.cs b/src/Sdk/BuildWebApi/Api/Contracts/ServerTargetExecutionOptions.cs new file mode 100644 index 00000000000..fe26b962897 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/ServerTargetExecutionOptions.cs @@ -0,0 +1,53 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents options for running a phase on the server. + /// + [DataContract] + [KnownType(typeof(VariableMultipliersServerExecutionOptions))] + [JsonConverter(typeof(ServerTargetExecutionOptionsJsonConverter))] + public class ServerTargetExecutionOptions : BaseSecuredObject + { + public ServerTargetExecutionOptions() + : this(ServerTargetExecutionType.Normal) + { + } + + protected ServerTargetExecutionOptions(Int32 type) + : this(type, null) + { + } + + internal ServerTargetExecutionOptions( + ISecuredObject securedObject) + : this(ServerTargetExecutionType.Normal, securedObject) + { + } + + internal ServerTargetExecutionOptions( + Int32 type, + ISecuredObject securedObject) + : base(securedObject) + { + this.Type = type; + } + + /// + /// The type. + /// + /// + /// for supported types. + /// + [DataMember(EmitDefaultValue = true)] + public Int32 Type + { + get; + private set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/ServerTargetExecutionOptionsJsonConverter.cs b/src/Sdk/BuildWebApi/Api/Contracts/ServerTargetExecutionOptionsJsonConverter.cs new file mode 100644 index 00000000000..9f2249a1513 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/ServerTargetExecutionOptionsJsonConverter.cs @@ -0,0 +1,36 @@ +using System; + +namespace GitHub.Build.WebApi +{ + internal sealed class ServerTargetExecutionOptionsJsonConverter : TypePropertyJsonConverter + { + protected override ServerTargetExecutionOptions GetInstance(Type objectType) + { + if (objectType == typeof(ServerTargetExecutionType)) + { + return new ServerTargetExecutionOptions(); + } + else if (objectType == typeof(VariableMultipliersServerExecutionOptions)) + { + return new VariableMultipliersServerExecutionOptions(); + } + else + { + return base.GetInstance(objectType); + } + } + + protected override ServerTargetExecutionOptions GetInstance(Int32 targetType) + { + switch (targetType) + { + case ServerTargetExecutionType.Normal: + return new ServerTargetExecutionOptions(); + case ServerTargetExecutionType.VariableMultipliers: + return new VariableMultipliersServerExecutionOptions(); + default: + return null; + } + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/ShallowReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/ShallowReference.cs new file mode 100644 index 00000000000..97e50d89a86 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/ShallowReference.cs @@ -0,0 +1,35 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + /// + /// An abstracted reference to some other resource. 
This class is used to provide the build + /// data contracts with a uniform way to reference other resources in a way that provides easy + /// traversal through links. + /// + [Obsolete("Use one of the specific References instead.")] + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class ShallowReference // TODO: this class is here to maintain binary compat with VS 15 RTW, and should be deleted before dev16 ships + { + /// + /// Id of the resource + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int32 Id { get; set; } + + /// + /// Name of the linked resource (definition name, controller name, etc.) + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Name { get; set; } + + /// + /// Full http link to the resource + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Url { get; set; } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/SourceProviderAttributes.cs b/src/Sdk/BuildWebApi/Api/Contracts/SourceProviderAttributes.cs new file mode 100644 index 00000000000..33c8d20ad10 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/SourceProviderAttributes.cs @@ -0,0 +1,181 @@ +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + [DataContract] + public class SourceProviderAttributes + { + /// + /// The name of the source provider. + /// + [DataMember] + public string Name + { + get; + set; + } + + /// + /// The types of triggers supported by this source provider. + /// + [DataMember(EmitDefaultValue = false)] + public IList SupportedTriggers + { + get; + set; + } + + /// + /// The capabilities supported by this source provider. + /// + [DataMember(EmitDefaultValue = false)] + public IDictionary SupportedCapabilities + { + get; + set; + } + + /// + /// The environments where this source provider is available. + /// + [IgnoreDataMember] + public SourceProviderAvailability Availability + { + get; + set; + } + + /// + /// Whether the repository type is external to TFS / VSTS servers + /// + [IgnoreDataMember] + public bool IsExternal + { + get; + set; + } + + #region Server-side convenience properties + [IgnoreDataMember] + public bool SupportsSourceLinks => SupportedCapabilities != null && SupportedCapabilities.TryGetValue(SourceProviderCapabilities.SourceLinks, out bool supported) && supported; + + [IgnoreDataMember] + public bool SupportsYamlDefinition => SupportedCapabilities != null && SupportedCapabilities.TryGetValue(SourceProviderCapabilities.YamlDefinition, out bool supported) && supported; + + [IgnoreDataMember] + public DefinitionTriggerType SupportedTriggerTypes => SupportedTriggers?.Select(t => t.Type).Aggregate(DefinitionTriggerType.None, (x, y) => x | y) ?? DefinitionTriggerType.None; + #endregion + } + + [DataContract] + public class SupportedTrigger + { + /// + /// The type of trigger. + /// + [DataMember] + public DefinitionTriggerType Type { get; set; } + + /// + /// How the trigger is notified of changes. + /// + /// + /// See for supported values. + /// + [DataMember] + public string NotificationType { get; set; } + + /// + /// The default interval to wait between polls (only relevant when is ). + /// + [DataMember] + public int DefaultPollingInterval { get; set; } + + /// + /// The capabilities supported by this trigger. 
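
The convenience properties on SourceProviderAttributes above aggregate the capability dictionary and the trigger list, so callers can ask what a provider supports without walking the raw collections. A small sketch (the DefinitionTriggerType member used here is assumed; only None appears in this diff):

    static bool SupportsYamlCi(SourceProviderAttributes provider)
    {
        // SupportedTriggerTypes ORs together the Type of every SupportedTrigger (see the Aggregate above).
        bool hasCiTrigger =
            (provider.SupportedTriggerTypes & DefinitionTriggerType.ContinuousIntegration) != 0;   // member assumed
        return provider.SupportsYamlDefinition && hasCiTrigger;
    }
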
+ /// + [DataMember(EmitDefaultValue = false)] + public IDictionary SupportedCapabilities + { + get; + set; + } + } + + [DataContract] + public enum SupportLevel + { + /// + /// The functionality is not supported. + /// + [EnumMember] + Unsupported, + + /// + /// The functionality is supported. + /// + [EnumMember] + Supported, + + /// + /// The functionality is required. + /// + [EnumMember] + Required + } + + [DataContract] + public enum SourceProviderAvailability + { + /// + /// The source provider is available in the hosted environment. + /// + [EnumMember] + Hosted = 1, + + /// + /// The source provider is available in the on-premises environment. + /// + [EnumMember] + OnPremises = 2, + + /// + /// The source provider is available in all environments. + /// + [EnumMember] + All = Hosted | OnPremises + } + + public class SourceProviderCapabilities + { + public const string CreateLabel = "createLabel"; + public const string QueryBranches = "queryBranches"; + public const string QueryFileContents = "queryFileContents"; + public const string QueryPathContents = "queryPathContents"; + public const string QueryPullRequest = "queryPullRequest"; + public const string QueryRelatedWorkItems = "queryRelatedWorkItems"; + public const string QueryRepositories = "queryRepositories"; + public const string QueryTopRepositories = "queryTopRepositories"; + public const string QueryWebhooks = "queryWebhooks"; + public const string SourceLinks = "sourceLinks"; + public const string YamlDefinition = "yamlDefinition"; + } + + public class TriggerCapabilities + { + public const string BranchFilters = "branchFilters"; + public const string PathFilters = "pathFilters"; + public const string BatchChanges = "batchChanges"; + public const string BuildForks = "buildForks"; + public const string Comments = "comments"; + } + + public class TriggerNotificationTypes + { + public const string None = "none"; + public const string Polling = "polling"; + public const string Webhook = "webhook"; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/SourceRelatedWorkItem.cs b/src/Sdk/BuildWebApi/Api/Contracts/SourceRelatedWorkItem.cs new file mode 100644 index 00000000000..0e3c713ff07 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/SourceRelatedWorkItem.cs @@ -0,0 +1,84 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a work item related to some source item. + /// These are retrieved from Source Providers. + /// + [DataContract] + public class SourceRelatedWorkItem : BaseSecuredObject + { + public SourceRelatedWorkItem() + { + } + + internal SourceRelatedWorkItem( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The name of the provider the work item is associated with. + /// + [DataMember] + public String ProviderName { get; set; } + + /// + /// Unique identifier for the work item + /// + [DataMember(EmitDefaultValue = false)] + public String Id { get; set; } + + /// + /// Short name for the work item. + /// + [DataMember(EmitDefaultValue = false)] + public String Title { get; set; } + + /// + /// Long description for the work item. + /// + [DataMember(EmitDefaultValue = false)] + public String Description { get; set; } + + /// + /// Type of work item, e.g. Bug, Task, User Story, etc. + /// + [DataMember(EmitDefaultValue = false)] + public String Type { get; set; } + + /// + /// Current state of the work item, e.g. Active, Resolved, Closed, etc. 
+ /// + [DataMember(EmitDefaultValue = false)] + public String CurrentState { get; set; } + + /// + /// Identity ref for the person that the work item is assigned to. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef AssignedTo { get; set; } + + /// + /// The links to other objects related to this object. + /// + public ReferenceLinks Links + { + get + { + if (m_links == null) + { + m_links = new ReferenceLinks(); + } + return m_links; + } + } + + [DataMember(Name = "_links", EmitDefaultValue = false)] + private ReferenceLinks m_links; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/SourceRepository.cs b/src/Sdk/BuildWebApi/Api/Contracts/SourceRepository.cs new file mode 100644 index 00000000000..1e5aca8922f --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/SourceRepository.cs @@ -0,0 +1,171 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a repository returned from a source provider. + /// + [DataContract] + public class SourceRepository + { + /// + /// The ID of the repository. + /// + [DataMember(EmitDefaultValue = false)] + public String Id + { + get; + set; + } + + /// + /// The name of the source provider the repository is from. + /// + [DataMember] + public String SourceProviderName + { + get; + set; + } + + /// + /// The friendly name of the repository. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// The full name of the repository. + /// + [DataMember(EmitDefaultValue = false)] + public String FullName + { + get; + set; + } + + /// + /// The URL of the repository. + /// + [DataMember(EmitDefaultValue = false)] + public Uri Url + { + get; + set; + } + + /// + /// The name of the default branch. + /// + [DataMember(EmitDefaultValue = false)] + public String DefaultBranch + { + get; + set; + } + + // TODO: Remove the Properties property. It mainly serves as an area to put provider API URLs that are + // passed back to the VSTS API so it does not need to construct provider API URLs. This is risky and we + // should form the URLs ourselves instead of trusting the client. + + /// + /// A dictionary that holds additional information about the repository. + /// + public IDictionary Properties + { + // Warning: This can contain secrets too. As part of #952656, we resolve secrets, it was done considering the fact that this is not a "DataMember" + // If it's ever made a "DataMember" please be cautious, we would be leaking secrets + get + { + if (m_properties == null) + { + m_properties = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_properties; + } + internal set + { + m_properties = new Dictionary(value, StringComparer.OrdinalIgnoreCase); + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedProperties, ref m_properties, StringComparer.OrdinalIgnoreCase, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_properties, ref m_serializedProperties, StringComparer.OrdinalIgnoreCase); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedProperties = null; + } + + [DataMember(Name = "Properties", EmitDefaultValue = false)] + private IDictionary m_serializedProperties; + + // Warning: This can contain secrets too. 
As part of #952656, we resolve secrets, it was done considering the fact that this is not a "DataMember" + // If it's ever made a "DataMember" please be cautious, we would be leaking secrets + private IDictionary m_properties; + } + + /// + /// A set of repositories returned from the source provider. + /// + [DataContract] + public class SourceRepositories + { + /// + /// A list of repositories + /// + [DataMember] + public List Repositories + { + get; + set; + } + + /// + /// A token used to continue this paged request; 'null' if the request is complete + /// + [DataMember] + public String ContinuationToken + { + get; + set; + } + + /// + /// The number of repositories requested for each page + /// + [DataMember] + public Int32 PageLength + { + get; + set; + } + + /// + /// The total number of pages, or '-1' if unknown + /// + [DataMember] + public Int32 TotalPageCount + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/SourceRepositoryItem.cs b/src/Sdk/BuildWebApi/Api/Contracts/SourceRepositoryItem.cs new file mode 100644 index 00000000000..851ac2e39dd --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/SourceRepositoryItem.cs @@ -0,0 +1,36 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents an item in a repository from a source provider. + /// + [DataContract] + public class SourceRepositoryItem + { + /// + /// The type of the item (folder, file, etc). + /// + [DataMember(EmitDefaultValue = false)] + public String Type { get; set; } + + /// + /// Whether the item is able to have sub-items (e.g., is a folder). + /// + [DataMember] + public Boolean IsContainer { get; set; } + + /// + /// The full path of the item, relative to the root of the repository. + /// + [DataMember(EmitDefaultValue = false)] + public String Path { get; set; } + + /// + /// The URL of the item. + /// + [DataMember(EmitDefaultValue = false)] + public Uri Url { get; set; } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/SvnWorkspace.cs b/src/Sdk/BuildWebApi/Api/Contracts/SvnWorkspace.cs new file mode 100644 index 00000000000..84bae7ec3e8 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/SvnWorkspace.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a Subversion mapping entry. + /// + [DataContract] + public class SvnMappingDetails + { + /// + /// The server path. + /// + [DataMember(Name = "serverPath")] + public String ServerPath + { + get; + set; + } + + /// + /// The local path. + /// + [DataMember(Name = "localPath")] + public String LocalPath + { + get; + set; + } + + /// + /// The revision. + /// + [DataMember(Name = "revision")] + public String Revision + { + get; + set; + } + + /// + /// The depth. + /// + [DataMember(Name = "depth")] + public Int32 Depth + { + get; + set; + } + + /// + /// Indicates whether to ignore externals. + /// + [DataMember(Name = "ignoreExternals")] + public bool IgnoreExternals + { + get; + set; + } + } + + /// + /// Represents a subversion workspace. + /// + [DataContract] + public class SvnWorkspace + { + /// + /// The list of mappings. 
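
SourceRepositories, earlier in this hunk, is a paged envelope: a list of repositories plus a ContinuationToken that is null once the listing is complete. A hedged paging sketch; FetchRepositoryPageAsync stands in for whichever client call actually returns this contract, and the element type of Repositories is inferred:

    // using System; using System.Threading.Tasks;
    static async Task ListAllRepositoriesAsync()
    {
        string continuationToken = null;
        do
        {
            SourceRepositories page = await FetchRepositoryPageAsync(continuationToken);   // hypothetical client call
            foreach (SourceRepository repo in page.Repositories)
            {
                Console.WriteLine($"{repo.FullName} (default branch: {repo.DefaultBranch})");
            }
            continuationToken = page.ContinuationToken;
        }
        while (continuationToken != null);
    }
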
+ /// + public List Mappings + { + get + { + if (m_Mappings == null) + { + m_Mappings = new List(); + } + return m_Mappings; + } + } + + [DataMember(Name = "mappings")] + private List m_Mappings; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/TaskAgentPoolReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/TaskAgentPoolReference.cs new file mode 100644 index 00000000000..bc1ce0c4f79 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/TaskAgentPoolReference.cs @@ -0,0 +1,66 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a reference to an agent pool. + /// + [DataContract] + public class TaskAgentPoolReference : BaseSecuredObject + { + public TaskAgentPoolReference() + { + } + + public TaskAgentPoolReference(Int32 id) + : this(id, null) + { + } + + internal TaskAgentPoolReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + internal TaskAgentPoolReference( + Int32 id, + ISecuredObject securedObject) + : base(securedObject) + { + this.Id = id; + } + + /// + /// The pool ID. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + set; + } + + /// + /// The pool name. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// A value indicating whether or not this pool is managed by the service. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean IsHosted + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/TaskOrchestrationPlanGroupReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/TaskOrchestrationPlanGroupReference.cs new file mode 100644 index 00000000000..25fa80c297c --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/TaskOrchestrationPlanGroupReference.cs @@ -0,0 +1,30 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a reference to a plan group. + /// + [DataContract] + public class TaskOrchestrationPlanGroupReference + { + /// + /// The project ID. + /// + [DataMember] + public Guid ProjectId + { + get; set; + } + + /// + /// The name of the plan group. + /// + [DataMember] + public String PlanGroup + { + get; set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/TaskOrchestrationPlanReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/TaskOrchestrationPlanReference.cs new file mode 100644 index 00000000000..a8c7e3c19ae --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/TaskOrchestrationPlanReference.cs @@ -0,0 +1,50 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a reference to an orchestration plan. + /// + [DataContract] + public class TaskOrchestrationPlanReference : BaseSecuredObject + { + public TaskOrchestrationPlanReference() + { + OrchestrationType = BuildOrchestrationType.Build; + } + + internal TaskOrchestrationPlanReference( + ISecuredObject securedObject) + : base(securedObject) + { + OrchestrationType = BuildOrchestrationType.Build; + } + + /// + /// The ID of the plan. + /// + [DataMember] + public Guid PlanId + { + get; + set; + } + + /// + /// The type of the plan. + /// + /// + /// for supported types. + /// + [DefaultValue(BuildOrchestrationType.Build)] + [DataMember(EmitDefaultValue = false)] + public Int32? 
OrchestrationType + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/TaskReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/TaskReference.cs new file mode 100644 index 00000000000..3522d34dcbb --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/TaskReference.cs @@ -0,0 +1,71 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a reference to a task. + /// + [DataContract] + public class TaskReference : BaseSecuredObject + { + public TaskReference() + { + } + + public TaskReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + protected TaskReference( + TaskReference taskToBeCloned) + : base(taskToBeCloned) + { + this.Id = taskToBeCloned.Id; + this.Name = taskToBeCloned.Name; + this.Version = taskToBeCloned.Version; + } + + /// + /// The ID of the task definition. + /// + [DataMember] + public Guid Id + { + get; + set; + } + + /// + /// The name of the task definition. + /// + [DataMember] + public String Name + { + get; + set; + } + + /// + /// The version of the task definition. + /// + [DataMember] + public String Version + { + get; + set; + } + + /// + /// Clones this object. + /// + /// + public virtual TaskReference Clone() + { + return new TaskReference(this); + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/Timeline.cs b/src/Sdk/BuildWebApi/Api/Contracts/Timeline.cs new file mode 100644 index 00000000000..7cb78b098cb --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/Timeline.cs @@ -0,0 +1,89 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents the timeline of a build. + /// + [DataContract] + public sealed class Timeline : TimelineReference + { + internal Timeline() + { + } + + internal Timeline( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The process or person that last changed the timeline. + /// + [DataMember] + public Guid LastChangedBy + { + get; + internal set; + } + + /// + /// The time the timeline was last changed. + /// + [DataMember] + public DateTime LastChangedOn + { + get; + internal set; + } + + /// + /// The list of records in this timeline. 
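
Records (below) follows the serialization idiom used throughout these contracts: the public property lazily creates a private list, while a separate [DataMember] field receives a copy only while serializing, so empty collections never go over the wire. A stripped-down illustration of the idiom with a hypothetical type, not part of the patch:

    // using System.Collections.Generic; using System.Runtime.Serialization;
    [DataContract]
    public class ExampleContract
    {
        public List<string> Items => m_items ?? (m_items = new List<string>());

        [OnSerializing]
        private void OnSerializing(StreamingContext context)
        {
            // Copy into the wire field only when there is something to send; EmitDefaultValue = false skips null.
            m_serializedItems = (m_items != null && m_items.Count > 0) ? new List<string>(m_items) : null;
        }

        [OnDeserialized]
        private void OnDeserialized(StreamingContext context)
        {
            m_items = m_serializedItems == null ? null : new List<string>(m_serializedItems);
            m_serializedItems = null;
        }

        [DataMember(Name = "Items", EmitDefaultValue = false)]
        private List<string> m_serializedItems;

        private List<string> m_items;
    }
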
+ /// + public List Records + { + get + { + if (m_records == null) + { + m_records = new List(); + } + return m_records; + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + if (m_serializedRecords != null && m_serializedRecords.Count > 0) + { + m_records = new List(m_serializedRecords); + m_serializedRecords = null; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_records != null && m_records.Count > 0) + { + m_serializedRecords = new List(m_records); + } + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedRecords = null; + } + + private List m_records; + + [DataMember(Name = "Records", EmitDefaultValue = false)] + private List m_serializedRecords; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/TimelineAttempt.cs b/src/Sdk/BuildWebApi/Api/Contracts/TimelineAttempt.cs new file mode 100644 index 00000000000..b0131f77b61 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/TimelineAttempt.cs @@ -0,0 +1,54 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; +using System.Text; +using System.Threading.Tasks; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + [DataContract] + public sealed class TimelineAttempt : BaseSecuredObject + { + public TimelineAttempt() + { + } + + internal TimelineAttempt( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// Gets or sets the attempt of the record. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 Attempt + { + get; + set; + } + + /// + /// Gets or sets the timeline identifier which owns the record representing this attempt. + /// + [DataMember(EmitDefaultValue = false)] + public Guid TimelineId + { + get; + set; + } + + /// + /// Gets or sets the record identifier located within the specified timeline. + /// + [DataMember(EmitDefaultValue = false)] + public Guid RecordId + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/TimelineRecord.cs b/src/Sdk/BuildWebApi/Api/Contracts/TimelineRecord.cs new file mode 100644 index 00000000000..8a2205d999b --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/TimelineRecord.cs @@ -0,0 +1,309 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents an entry in a build's timeline. + /// + [DataContract] + public sealed class TimelineRecord : BaseSecuredObject + { + public TimelineRecord() + { + } + + internal TimelineRecord( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The ID of the record. + /// + [DataMember] + public Guid Id + { + get; + set; + } + + /// + /// The ID of the record's parent. + /// + [DataMember] + public Guid? ParentId + { + get; + set; + } + + /// + /// The type of the record. + /// + [DataMember(Name = "Type")] + public String RecordType + { + get; + set; + } + + /// + /// The name. + /// + [DataMember] + public String Name + { + get; + set; + } + + /// + /// The start time. + /// + [DataMember] + public DateTime? StartTime + { + get; + set; + } + + /// + /// The finish time. + /// + [DataMember] + public DateTime? FinishTime + { + get; + set; + } + + /// + /// A string that indicates the current operation. + /// + [DataMember] + public String CurrentOperation + { + get; + set; + } + + /// + /// The current completion percentage. + /// + [DataMember] + public Int32? 
PercentComplete + { + get; + set; + } + + /// + /// The state of the record. + /// + [DataMember] + public TimelineRecordState? State + { + get; + set; + } + + /// + /// The result. + /// + [DataMember] + public TaskResult? Result + { + get; + set; + } + + /// + /// The result code. + /// + [DataMember] + public String ResultCode + { + get; + set; + } + + /// + /// The change ID. + /// + /// + /// This is a monotonically-increasing number used to ensure consistency in the UI. + /// + [DataMember] + public Int32 ChangeId + { + get; + set; + } + + /// + /// The time the record was last modified. + /// + [DataMember] + public DateTime LastModified + { + get; + set; + } + + /// + /// The name of the agent running the operation. + /// + [DataMember] + public String WorkerName + { + get; + set; + } + + /// + /// An ordinal value relative to other records. + /// + [DataMember(EmitDefaultValue = false)] + public Int32? Order + { + get; + set; + } + + /// + /// A reference to a sub-timeline. + /// + [DataMember] + public TimelineReference Details + { + get; + set; + } + + /// + /// The number of errors produced by this operation. + /// + [DataMember] + public Int32? ErrorCount + { + get; + set; + } + + /// + /// The number of warnings produced by this operation. + /// + [DataMember] + public Int32? WarningCount + { + get; + set; + } + + /// + /// The list of issues produced by this operation. + /// + public List Issues + { + get + { + if (m_issues == null) + { + m_issues = new List(); + } + return m_issues; + } + } + + /// + /// The REST URL of the timeline record. + /// + [DataMember] + public Uri Url + { + get; + set; + } + + /// + /// A reference to the log produced by this operation. + /// + [DataMember] + public BuildLogReference Log + { + get; + set; + } + + /// + /// A reference to the task represented by this timeline record. + /// + [DataMember] + public TaskReference Task + { + get; + set; + } + + /// + /// Attempt number of record. + /// + [DataMember] + public Int32 Attempt + { + get; + set; + } + + /// + /// String identifier that is consistent across attempts. + /// + [DataMember] + public String Identifier + { + get; + set; + } + + public IList PreviousAttempts + { + get + { + if (m_previousAttempts == null) + { + m_previousAttempts = new List(); + } + return m_previousAttempts; + } + } + + + /// + /// The links to other objects related to this object. + /// + public ReferenceLinks Links + { + get + { + if (m_links == null) + { + m_links = new ReferenceLinks(); + } + return m_links; + } + } + + [DataMember(Name = "_links", EmitDefaultValue = false)] + private ReferenceLinks m_links; + + [DataMember(Name = "Issues", EmitDefaultValue = false, Order = 60)] + private List m_issues; + + [DataMember(Name = "PreviousAttempts", EmitDefaultValue = false)] + private List m_previousAttempts; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/TimelineReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/TimelineReference.cs new file mode 100644 index 00000000000..2ba50efd94b --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/TimelineReference.cs @@ -0,0 +1,56 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a reference to a timeline. + /// + [DataContract] + public class TimelineReference : BaseSecuredObject + { + internal TimelineReference() + { + } + + internal TimelineReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The ID of the timeline. 
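
TimelineRecord, just above, models a flat list that encodes a tree: ParentId points at the owning record, Order sorts siblings, and Details can point at a nested timeline. A hedged sketch of rebuilding that hierarchy from a timeline's records (element types inferred, since generic arguments are elided in this diff):

    // using System; using System.Collections.Generic; using System.Linq;
    static ILookup<Guid?, TimelineRecord> GroupRecordsByParent(IEnumerable<TimelineRecord> records)
    {
        // Root records carry a null ParentId; siblings order by the optional Order value when present.
        return records
            .OrderBy(r => r.Order ?? int.MaxValue)
            .ToLookup(r => r.ParentId);
    }
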
+ /// + [DataMember(Order = 1)] + public Guid Id + { + get; + internal set; + } + + /// + /// The change ID. + /// + /// + /// This is a monotonically-increasing number used to ensure consistency in the UI. + /// + [DataMember(Order = 2)] + public Int32 ChangeId + { + get; + internal set; + } + + /// + /// The REST URL of the timeline. + /// + [DataMember(Order = 3)] + public String Url + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/VariableGroup.cs b/src/Sdk/BuildWebApi/Api/Contracts/VariableGroup.cs new file mode 100644 index 00000000000..0e463e40e5a --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/VariableGroup.cs @@ -0,0 +1,98 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a variable group. + /// + [DataContract] + public class VariableGroup : VariableGroupReference + { + public VariableGroup() + : this(null) + { + } + + internal VariableGroup( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The type of the variable group. + /// + [DataMember(EmitDefaultValue = false)] + public String Type + { + get; + set; + } + + /// + /// The name of the variable group. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// The description. + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + /// + /// The variables in this group. + /// + public IDictionary Variables + { + get + { + if (m_variables == null) + { + m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_variables; + } + internal set + { + m_variables = new Dictionary(value, StringComparer.OrdinalIgnoreCase); + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedVariables, ref m_variables, StringComparer.OrdinalIgnoreCase, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_variables, ref m_serializedVariables, StringComparer.OrdinalIgnoreCase); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedVariables = null; + } + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private IDictionary m_serializedVariables; + + private IDictionary m_variables; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/VariableGroupReference.cs b/src/Sdk/BuildWebApi/Api/Contracts/VariableGroupReference.cs new file mode 100644 index 00000000000..b6f3f8f1d15 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/VariableGroupReference.cs @@ -0,0 +1,44 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a reference to a variable group. + /// + [DataContract] + public class VariableGroupReference : BaseSecuredObject + { + public VariableGroupReference() + : this(null) + { + } + + internal VariableGroupReference( + ISecuredObject securedObject) + : base(securedObject) + { + } + + /// + /// The ID of the variable group. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + set; + } + + /// + /// The Name of the variable group. 
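
VariableGroup.Variables above is backed by a dictionary created with StringComparer.OrdinalIgnoreCase, so lookups ignore the casing of the variable name. A minimal sketch; the dictionary's value type is elided in this diff, so the helper stays generic over it:

    // using System.Collections.Generic;
    static bool TryGetVariable<TValue>(IDictionary<string, TValue> groupVariables, string name, out TValue value)
    {
        // "ConnectionString" and "connectionstring" resolve to the same entry because of the case-insensitive comparer.
        return groupVariables.TryGetValue(name, out value);
    }
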
+ /// + [DataMember(EmitDefaultValue = false)] + public String Alias + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/VariableMultipliersAgentExecutionOptions.cs b/src/Sdk/BuildWebApi/Api/Contracts/VariableMultipliersAgentExecutionOptions.cs new file mode 100644 index 00000000000..d325d3db85a --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/VariableMultipliersAgentExecutionOptions.cs @@ -0,0 +1,84 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents options for running a phase based on values specified by a list of variables. + /// + [DataContract] + public class VariableMultipliersAgentExecutionOptions : AgentTargetExecutionOptions, IVariableMultiplierExecutionOptions + { + public VariableMultipliersAgentExecutionOptions() + : base(AgentTargetExecutionType.VariableMultipliers) + { + } + + internal VariableMultipliersAgentExecutionOptions( + ISecuredObject securedObject) + : base(AgentTargetExecutionType.VariableMultipliers, securedObject) + { + MaxConcurrency = 1; + ContinueOnError = false; + } + + /// + /// The maximum number of agents to use in parallel. + /// + [DataMember(EmitDefaultValue=true)] + [DefaultValue(1)] + public Int32 MaxConcurrency { + get; + set; + } + + /// + /// Indicates whether failure on one agent should prevent the phase from running on other agents. + /// + [DataMember(EmitDefaultValue = true)] + public Boolean ContinueOnError + { + get; + set; + } + + /// + /// The list of variables used to determine the matrix of phases to run. + /// + public List Multipliers + { + get + { + if (m_multipliers == null) + { + m_multipliers = new List(); + } + return m_multipliers; + } + set + { + m_multipliers = value; + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedMultipliers, ref m_multipliers, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_multipliers, ref m_serializedMultipliers); + } + + [DataMember(Name = "Multipliers", EmitDefaultValue = false)] + private List m_serializedMultipliers; + + private List m_multipliers; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/VariableMultipliersServerExecutionOptions.cs b/src/Sdk/BuildWebApi/Api/Contracts/VariableMultipliersServerExecutionOptions.cs new file mode 100644 index 00000000000..505c570e6c2 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/VariableMultipliersServerExecutionOptions.cs @@ -0,0 +1,84 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents options for running a phase based on values specified by a list of variables. + /// + [DataContract] + public class VariableMultipliersServerExecutionOptions : ServerTargetExecutionOptions, IVariableMultiplierExecutionOptions + { + public VariableMultipliersServerExecutionOptions() + : this(null) + { + } + + internal VariableMultipliersServerExecutionOptions( + ISecuredObject securedObject) + : base(ServerTargetExecutionType.VariableMultipliers, securedObject) + { + MaxConcurrency = 1; + ContinueOnError = false; + } + + /// + /// The maximum number of server jobs to run in parallel. 
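
Both multiplier option types expand a phase into one job per combination of the listed variables, capped by MaxConcurrency. A hedged configuration sketch; the Multipliers element type is elided in the diff, and the variable name shown is illustrative:

    var options = new VariableMultipliersAgentExecutionOptions
    {
        MaxConcurrency = 2,        // run at most two matrix legs at a time
        ContinueOnError = true     // let the remaining legs finish if one fails
    };
    options.Multipliers.Add("targetFramework");    // each distinct value of this variable becomes its own job
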
+ /// + [DataMember(EmitDefaultValue = true)] + [DefaultValue(1)] + public Int32 MaxConcurrency { + get; + set; + } + + /// + /// Indicates whether failure of one job should prevent the phase from running in other jobs. + /// + [DataMember(EmitDefaultValue = true)] + public Boolean ContinueOnError + { + get; + set; + } + + /// + /// The list of variables used to determine the matrix of phases to run. + /// + public List Multipliers + { + get + { + if (m_multipliers == null) + { + m_multipliers = new List(); + } + return m_multipliers; + } + set + { + m_multipliers = new List(value); + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedMultipliers, ref m_multipliers, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_multipliers, ref m_serializedMultipliers); + } + + [DataMember(Name = "Multipliers", EmitDefaultValue = false)] + private List m_serializedMultipliers; + + private List m_multipliers; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/WorkspaceTemplate.cs b/src/Sdk/BuildWebApi/Api/Contracts/WorkspaceTemplate.cs new file mode 100644 index 00000000000..06831b63b93 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/WorkspaceTemplate.cs @@ -0,0 +1,141 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public sealed class WorkspaceTemplate + { + /// + /// Uri of the associated definition + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String DefinitionUri + { + get; + set; + } + + /// + /// List of workspace mappings + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public List Mappings + { + get; + set; + } + + /// + /// The last time this template was modified + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public DateTime LastModifiedDate + { + get; + set; + } + + /// + /// The identity that last modified this template + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String LastModifiedBy + { + get; + set; + } + + /// + /// Id of the workspace for this template + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + internal Int32 WorkspaceId + { + get; + set; + } + + } + + /// + /// Mapping for a workspace + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public sealed class WorkspaceMapping + { + /// + /// Server location of the definition + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String ServerItem + { + get; + set; + } + + /// + /// local location of the definition + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String LocalItem + { + get; + set; + } + + /// + /// type of workspace mapping + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public WorkspaceMappingType MappingType + { + get; + set; + } + + /// + /// Depth of this mapping + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int32 Depth + { + get; + set; + } + + /// + /// Uri of the associated definition + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + internal String DefinitionUri + { + get; + set; + } + + /// + /// Id of the workspace + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + internal Int32 WorkspaceId + { + get; 
+ set; + } + + public override String ToString() + { + return String.Format(System.Globalization.CultureInfo.InvariantCulture, + "[WorkspaceMapping ServerItem={0} LocalItem={1} MappingType={2} Depth={3}]", + ServerItem, + LocalItem, + MappingType, + Depth); + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Contracts/YamlProcess.cs b/src/Sdk/BuildWebApi/Api/Contracts/YamlProcess.cs new file mode 100644 index 00000000000..f10ae8e0344 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Contracts/YamlProcess.cs @@ -0,0 +1,76 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi +{ + /// + /// Represents a YAML process. + /// + [DataContract] + public class YamlProcess : BuildProcess + { + public YamlProcess() + : this(null) + { + } + + internal YamlProcess( + ISecuredObject securedObject) + : base(ProcessType.Yaml, securedObject) + { + } + + /// + /// The resources used by the build definition. + /// + [DataMember(EmitDefaultValue = false)] + public BuildProcessResources Resources + { + get; + set; + } + + /// + /// The list of errors encountered when reading the YAML definition. + /// + public IList Errors + { + get + { + if (m_errors == null) + { + m_errors = new List(); + } + return m_errors; + } + set + { + m_errors = new List(value); + } + } + + /// + /// The YAML filename. + /// + [DataMember] + public String YamlFilename + { + get; + set; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_errors?.Count == 0) + { + m_errors = null; + } + } + + [DataMember(Name = "Errors", EmitDefaultValue = false)] + private List m_errors; + } +} diff --git a/src/Sdk/BuildWebApi/Api/DefinitionMetrics.cs b/src/Sdk/BuildWebApi/Api/DefinitionMetrics.cs new file mode 100644 index 00000000000..49b5ce45d33 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/DefinitionMetrics.cs @@ -0,0 +1,35 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [GenerateAllConstants] + public static class DefinitionMetrics + { + // historic metrics + public const String SuccessfulBuilds = "SuccessfulBuilds"; + public const String FailedBuilds = "FailedBuilds"; + public const String PartiallySuccessfulBuilds = "PartiallySuccessfulBuilds"; + public const String CanceledBuilds = "CanceledBuilds"; + public const String TotalBuilds = "TotalBuilds"; + + // current metrics - scopeddate null + public const String CurrentBuildsInQueue = "CurrentBuildsInQueue"; + public const String CurrentBuildsInProgress = "CurrentBuildsInProgress"; + } + + [Obsolete("Use DefinitionMetrics instead.")] + public static class WellKnownDefinitionMetrics + { + // historic metrics + public const String SuccessfulBuilds = DefinitionMetrics.SuccessfulBuilds; + public const String FailedBuilds = DefinitionMetrics.FailedBuilds; + public const String PartiallySuccessfulBuilds = DefinitionMetrics.PartiallySuccessfulBuilds; + public const String CanceledBuilds = DefinitionMetrics.CanceledBuilds; + public const String TotalBuilds = DefinitionMetrics.TotalBuilds; + + // current metrics - scopeddate null + public const String CurrentBuildsInQueue = DefinitionMetrics.CurrentBuildsInQueue; + public const String CurrentBuildsInProgress = DefinitionMetrics.CurrentBuildsInProgress; + } +} diff --git a/src/Sdk/BuildWebApi/Api/DefinitionReferenceJsonConverter.cs b/src/Sdk/BuildWebApi/Api/DefinitionReferenceJsonConverter.cs new file mode 100644 index 00000000000..9f0caa2a783 --- /dev/null +++ 
b/src/Sdk/BuildWebApi/Api/DefinitionReferenceJsonConverter.cs @@ -0,0 +1,112 @@ +using System; +using System.Reflection; +using GitHub.Build.WebApi.Internals; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json.Serialization; + +namespace GitHub.Build.WebApi +{ + internal sealed class DefinitionReferenceJsonConverter : VssSecureJsonConverter + { + public override Boolean CanConvert(Type objectType) + { + return typeof(DefinitionReference).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + Object newValue = null; + + if (objectType == typeof(BuildDefinition)) + { + newValue = new BuildDefinition(); + } + else if (objectType == typeof(BuildDefinition3_2)) + { + newValue = new BuildDefinition3_2(); + } + else if (objectType == typeof(BuildDefinitionReference)) + { + newValue = new BuildDefinitionReference(); + } + + JObject value = JObject.Load(reader); + + if (newValue == null) + { + var contract = serializer.ContractResolver.ResolveContract(objectType) as JsonObjectContract; + if (contract == null) + { + return existingValue; + } + + JsonProperty property = contract.Properties.GetClosestMatchProperty("Type"); + if (property == null) + { + return existingValue; + } + + JToken definitionTypeValue; + DefinitionType definitionType = DefinitionType.Build; + if (value.TryGetValue(property.PropertyName, out definitionTypeValue)) + { + if (definitionTypeValue.Type == JTokenType.Integer) + { + definitionType = (DefinitionType)(Int32)definitionTypeValue; + } + else if (definitionTypeValue.Type != JTokenType.String || + !Enum.TryParse((String)definitionTypeValue, true, out definitionType)) + { + definitionType = DefinitionType.Build; + } + } + + switch (definitionType) + { + case DefinitionType.Build: + default: // this is build2, after all + newValue = new BuildDefinition(); + break; + } + } + + if (value != null) + { + using (JsonReader objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, newValue); + } + } + + return newValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + // The virtual method returns false for CanWrite so this should never be invoked + throw new NotSupportedException(); + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/EndpointData.cs b/src/Sdk/BuildWebApi/Api/EndpointData.cs new file mode 100644 index 00000000000..691af7001bf --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/EndpointData.cs @@ -0,0 +1,51 @@ +using System; + +namespace GitHub.Build.WebApi +{ + public static class EndpointData + { + public const string AcceptUntrustedCertificates = "acceptUntrustedCerts"; + public const string CheckoutNestedSubmodules = "checkoutNestedSubmodules"; + public const string CheckoutSubmodules = "checkoutSubmodules"; + public const string Clean = "clean"; + public const string CleanOptions = "cleanOptions"; + public const string DefaultBranch = "defaultBranch"; + public const string FetchDepth = "fetchDepth"; + public const string GitLfsSupport = "gitLfsSupport"; + public const string JenkinsAcceptUntrustedCertificates = "acceptUntrustedCerts"; + public const string OnPremTfsGit = "onpremtfsgit"; + public const string Password = "password"; + public const string 
RepositoryId = "repositoryId"; + public const string RootFolder = "rootFolder"; + public const string SkipSyncSource = "skipSyncSource"; + public const string SvnAcceptUntrustedCertificates = "acceptUntrustedCerts"; + public const string SvnRealmName = "realmName"; + public const string SvnWorkspaceMapping = "svnWorkspaceMapping"; + public const string TfvcWorkspaceMapping = "tfvcWorkspaceMapping"; + public const string Username = "username"; + } + + [Obsolete("Use EndpointData instead.")] + public static class WellKnownEndpointData + { + public const string CheckoutNestedSubmodules = EndpointData.CheckoutNestedSubmodules; + public const string CheckoutSubmodules = EndpointData.CheckoutSubmodules; + public const string Clean = EndpointData.Clean; + public const string CleanOptions = EndpointData.CleanOptions; + public const string DefaultBranch = EndpointData.DefaultBranch; + public const string FetchDepth = EndpointData.FetchDepth; + public const string GitLfsSupport = EndpointData.GitLfsSupport; + public const string JenkinsAcceptUntrustedCertificates = EndpointData.JenkinsAcceptUntrustedCertificates; + public const string OnPremTfsGit = EndpointData.OnPremTfsGit; + public const string Password = EndpointData.Password; + public const string RepositoryId = EndpointData.RepositoryId; + public const string RootFolder = EndpointData.RootFolder; + public const string SkipSyncSource = EndpointData.SkipSyncSource; + public const string SvnAcceptUntrustedCertificates = EndpointData.SvnAcceptUntrustedCertificates; + public const string SvnRealmName = EndpointData.SvnRealmName; + public const string SvnWorkspaceMapping = EndpointData.SvnWorkspaceMapping; + public const string TfvcWorkspaceMapping = EndpointData.TfvcWorkspaceMapping; + public const string Username = EndpointData.Username; + public const string AcceptUntrustedCertificates = EndpointData.AcceptUntrustedCertificates; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Enumerations.cs b/src/Sdk/BuildWebApi/Api/Enumerations.cs new file mode 100644 index 00000000000..0e835dc69b9 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Enumerations.cs @@ -0,0 +1,855 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi +{ + [DataContract] + public enum AgentStatus + { + /// + /// Indicates that the build agent cannot be contacted. + /// + [EnumMember] + Unavailable = 0, + + /// + /// Indicates that the build agent is currently available. + /// + [EnumMember] + Available = 1, + + /// + /// Indicates that the build agent has taken itself offline. + /// + [EnumMember] + Offline = 2, + } + + [DataContract] + public enum AuditAction + { + [EnumMember] + Add = 1, + [EnumMember] + Update = 2, + [EnumMember] + Delete = 3 + } + + /// + /// Represents the desired scope of authorization for a build. + /// + [DataContract] + public enum BuildAuthorizationScope + { + /// + /// The identity used should have build service account permissions scoped to the project collection. This is + /// useful when resources for a single build are spread across multiple projects. + /// + [EnumMember] + ProjectCollection = 1, + + /// + /// The identity used should have build service account permissions scoped to the project in which the build + /// definition resides. This is useful for isolation of build jobs to a particular team project to avoid any + /// unintentional escalation of privilege attacks during a build. 
+ /// + [EnumMember] + Project = 2, + } + + [DataContract] + public enum BuildOptionInputType + { + [EnumMember] + String, + [EnumMember] + Boolean, + [EnumMember] + StringList, + [EnumMember] + Radio, + [EnumMember] + PickList, + [EnumMember] + MultiLine, + [EnumMember] + BranchFilter + } + + [DataContract] + public enum BuildPhaseStatus + { + /// + /// The state is not known. + /// + [EnumMember] + Unknown = 0, + + /// + /// The build phase completed unsuccessfully. + /// + [EnumMember] + Failed = 1, + + /// + /// The build phase completed successfully. + /// + [EnumMember] + Succeeded = 2, + } + + /// + /// Specifies the desired ordering of builds. + /// + [DataContract] + public enum BuildQueryOrder + { + /// + /// Order by finish time ascending. + /// + [EnumMember] + FinishTimeAscending = 2, + + /// + /// Order by finish time descending. + /// + [EnumMember] + FinishTimeDescending = 3, + + /// + /// Order by queue time descending. + /// + [EnumMember] + QueueTimeDescending = 4, + + /// + /// Order by queue time ascending. + /// + [EnumMember] + QueueTimeAscending = 5, + + /// + /// Order by start time descending. + /// + [EnumMember] + StartTimeDescending = 6, + + /// + /// Order by start time ascending. + /// + [EnumMember] + StartTimeAscending = 7 + } + + /// + /// Specifies the desired ordering of definitions. + /// + [DataContract] + public enum DefinitionQueryOrder + { + /// + /// No order + /// + [EnumMember] + None = 0, + + /// + /// Order by created on/last modified time ascending. + /// + [EnumMember] + LastModifiedAscending = 1, + + /// + /// Order by created on/last modified time descending. + /// + [EnumMember] + LastModifiedDescending = 2, + + /// + /// Order by definition name ascending. + /// + [EnumMember] + DefinitionNameAscending = 3, + + /// + /// Order by definition name descending. + /// + [EnumMember] + DefinitionNameDescending = 4 + } + + /// + /// Specifies the desired ordering of folders. + /// + [DataContract] + public enum FolderQueryOrder + { + /// + /// No order + /// + [EnumMember] + None = 0, + + /// + /// Order by folder name and path ascending. + /// + [EnumMember] + FolderAscending = 1, + + /// + /// Order by folder name and path descending. + /// + [EnumMember] + FolderDescending = 2 + } + + [DataContract] + public enum BuildReason + { + /// + /// No reason. This value should not be used. + /// + [EnumMember] + None = 0, + + /// + /// The build was started manually. + /// + [EnumMember] + Manual = 1, + + /// + /// The build was started for the trigger TriggerType.ContinuousIntegration. + /// + [EnumMember] + IndividualCI = 2, + + /// + /// The build was started for the trigger TriggerType.BatchedContinuousIntegration. + /// + [EnumMember] + BatchedCI = 4, + + /// + /// The build was started for the trigger TriggerType.Schedule. + /// + [EnumMember] + Schedule = 8, + + /// + /// The build was started for the trigger TriggerType.ScheduleForced. + /// + [EnumMember] + ScheduleForced = 16, + + /// + /// The build was created by a user. + /// + [EnumMember] + UserCreated = 32, + + /// + /// The build was started manually for private validation. + /// + [EnumMember] + ValidateShelveset = 64, + + /// + /// The build was started for the trigger ContinuousIntegrationType.Gated. + /// + [EnumMember] + CheckInShelveset = 128, + + /// + /// The build was started by a pull request. + /// Added in resource version 3. + /// + [EnumMember] + PullRequest = 256, + + /// + /// The build was started when another build completed. 
+ /// + [EnumMember] + BuildCompletion = 512, + + /// + /// The build was triggered for retention policy purposes. + /// + [EnumMember] + Triggered = Manual | IndividualCI | BatchedCI | Schedule | UserCreated | CheckInShelveset | PullRequest | BuildCompletion, + + /// + /// All reasons. + /// + [EnumMember] + All = Manual | IndividualCI | BatchedCI | Schedule | UserCreated | ValidateShelveset | CheckInShelveset | PullRequest | BuildCompletion, + } + + /// + /// This is not a Flags enum because we don't want to set multiple statuses on a build. + /// However, when adding values, please stick to powers of 2 as if it were a Flags enum + /// This will ensure that things that key off multiple result types (like labelling sources) continue to work + /// + [DataContract] + public enum BuildResult + { + /// + /// No result + /// + [EnumMember] + None = 0, + + /// + /// The build completed successfully. + /// + [EnumMember] + Succeeded = 2, + + /// + /// The build completed compilation successfully but had other errors. + /// + [EnumMember] + PartiallySucceeded = 4, + + /// + /// The build completed unsuccessfully. + /// + [EnumMember] + Failed = 8, + + /// + /// The build was canceled before starting. + /// + [EnumMember] + Canceled = 32 + } + + [DataContract] + public enum BuildStatus + { + /// + /// No status. + /// + [EnumMember] + None = 0, + + /// + /// The build is currently in progress. + /// + [EnumMember] + InProgress = 1, + + /// + /// The build has completed. + /// + [EnumMember] + Completed = 2, + + /// + /// The build is cancelling + /// + [EnumMember] + Cancelling = 4, + + /// + /// The build is inactive in the queue. + /// + [EnumMember] + Postponed = 8, + + /// + /// The build has not yet started. + /// + [EnumMember] + NotStarted = 32, + + /// + /// All status. + /// + [EnumMember] + All = 47, + } + + [DataContract] + public enum ControllerStatus + { + /// + /// Indicates that the build controller cannot be contacted. + /// + [EnumMember] + Unavailable = 0, + + /// + /// Indicates that the build controller is currently available. + /// + [EnumMember] + Available = 1, + + /// + /// Indicates that the build controller has taken itself offline. + /// + [EnumMember] + Offline = 2, + } + + [DataContract] + public enum DefinitionType + { + [EnumMember] + Xaml = 1, + [EnumMember] + Build = 2 + + } + + [DataContract] + public enum DefinitionQuality + { + [EnumMember] + Definition = 1, + [EnumMember] + Draft = 2 + } + + [DataContract] + public enum GetOption + { + /// + /// Use the latest changeset at the time the build is queued. + /// + [EnumMember] + LatestOnQueue = 0, + + /// + /// Use the latest changeset at the time the build is started. + /// + [EnumMember] + LatestOnBuild = 1, + + /// + /// A user-specified version has been supplied. + /// + [EnumMember] + Custom = 2, + } + + [DataContract] + public enum IssueType + { + [EnumMember] + Error = 1, + + [EnumMember] + Warning = 2 + } + + [DataContract] + public enum QueryDeletedOption + { + /// + /// Include only non-deleted builds. + /// + [EnumMember] + ExcludeDeleted = 0, + + /// + /// Include deleted and non-deleted builds. + /// + [EnumMember] + IncludeDeleted = 1, + + /// + /// Include only deleted builds. + /// + [EnumMember] + OnlyDeleted = 2 + } + + [DataContract] + public enum QueuePriority + { + /// + /// Low priority. + /// + [EnumMember] + Low = 5, + + /// + /// Below normal priority. + /// + [EnumMember] + BelowNormal = 4, + + /// + /// Normal priority. 
+        ///
+        [EnumMember]
+        Normal = 3,
+
+        ///
+        /// Above normal priority.
+        ///
+        [EnumMember]
+        AboveNormal = 2,
+
+        ///
+        /// High priority.
+        ///
+        [EnumMember]
+        High = 1,
+    }
+
+    [DataContract]
+    [Flags]
+    public enum QueueOptions
+    {
+        ///
+        /// No queue options
+        ///
+        [EnumMember]
+        None = 0,
+
+        ///
+        /// Create a plan Id for the build, do not run it
+        ///
+        [EnumMember]
+        DoNotRun = 1
+    }
+
+    [DataContract]
+    public enum RepositoryCleanOptions
+    {
+        ///
+        /// Run git clean -fdx && git reset --hard or Tf /scorch on $(build.sourcesDirectory)
+        ///
+        [EnumMember]
+        Source,
+
+        ///
+        /// Run git clean -fdx && git reset --hard or Tf /scorch on $(build.sourcesDirectory), also re-create $(build.binariesDirectory)
+        ///
+        [EnumMember]
+        SourceAndOutputDir,
+
+        ///
+        /// Re-create $(build.sourcesDirectory)
+        ///
+        [EnumMember]
+        SourceDir,
+
+        ///
+        /// Re-create $(agent.buildDirectory), which contains $(build.sourcesDirectory), $(build.binariesDirectory) and any folders left over from the previous build.
+        ///
+        [EnumMember]
+        AllBuildDir,
+    }
+
+    [DataContract]
+    public enum ResultSet
+    {
+        ///
+        /// Include all repositories
+        ///
+        [EnumMember]
+        All = 0,
+
+        ///
+        /// Include the most relevant repositories for the user
+        ///
+        [EnumMember]
+        Top = 1,
+    }
+
+    [DataContract]
+    public enum ServiceHostStatus
+    {
+        ///
+        /// The service host is currently connected and accepting commands.
+        ///
+        [EnumMember]
+        Online = 1,
+
+        ///
+        /// The service host is currently disconnected and not accepting commands.
+        ///
+        [EnumMember]
+        Offline = 2,
+    }
+
+    [DataContract]
+    public enum TaskResult
+    {
+        [EnumMember]
+        Succeeded = 0,
+
+        [EnumMember]
+        SucceededWithIssues = 1,
+
+        [EnumMember]
+        Failed = 2,
+
+        [EnumMember]
+        Canceled = 3,
+
+        [EnumMember]
+        Skipped = 4,
+
+        [EnumMember]
+        Abandoned = 5,
+    }
+
+    [DataContract]
+    public enum TimelineRecordState
+    {
+        [EnumMember]
+        Pending,
+
+        [EnumMember]
+        InProgress,
+
+        [EnumMember]
+        Completed,
+    }
+
+    [DataContract]
+    public enum DefinitionTriggerType
+    {
+        ///
+        /// Manual builds only.
+        ///
+        [EnumMember]
+        None = 1,
+
+        ///
+        /// A build should be started for each changeset.
+        ///
+        [EnumMember]
+        ContinuousIntegration = 2,
+
+        ///
+        /// A build should be started for multiple changesets at a time at a specified interval.
+        ///
+        [EnumMember]
+        BatchedContinuousIntegration = 4,
+
+        ///
+        /// A build should be started on a specified schedule whether or not changesets exist.
+        ///
+        [EnumMember]
+        Schedule = 8,
+
+        ///
+        /// A validation build should be started for each check-in.
+        ///
+        [EnumMember]
+        GatedCheckIn = 16,
+
+        ///
+        /// A validation build should be started for each batch of check-ins.
+        ///
+        [EnumMember]
+        BatchedGatedCheckIn = 32,
+
+        ///
+        /// A build should be triggered when a GitHub pull request is created or updated.
+        /// Added in resource version 3.
+        ///
+        [EnumMember]
+        PullRequest = 64,
+
+        ///
+        /// A build should be triggered when another build completes.
+        ///
+        [EnumMember]
+        BuildCompletion = 128,
+
+        ///
+        /// All types.
+        ///
+        [EnumMember]
+        All = None | ContinuousIntegration | BatchedContinuousIntegration | Schedule | GatedCheckIn | BatchedGatedCheckIn | PullRequest | BuildCompletion,
+    }
+
+    [DataContract]
+    public enum DefinitionQueueStatus
+    {
+        ///
+        /// When enabled the definition queue allows builds to be queued by users,
+        /// the system will queue scheduled, gated and continuous integration builds,
+        /// and the queued builds will be started by the system.
+ /// + [EnumMember] + Enabled, + + /// + /// When paused the definition queue allows builds to be queued by users + /// and the system will queue scheduled, gated and continuous integration builds. + /// Builds in the queue will not be started by the system. + /// + [EnumMember] + Paused, + + /// + /// When disabled the definition queue will not allow builds to be queued by users + /// and the system will not queue scheduled, gated or continuous integration builds. + /// Builds already in the queue will not be started by the system. + /// + [EnumMember] + Disabled + } + + [DataContract] + public enum DeleteOptions + { + /// + /// No data should be deleted. This value should not be used. + /// + [EnumMember] + None = 0, + + /// + /// The drop location should be deleted. + /// + [EnumMember] + DropLocation = 1, + + /// + /// The test results should be deleted. + /// + [EnumMember] + TestResults = 2, + + /// + /// The version control label should be deleted. + /// + [EnumMember] + Label = 4, + + /// + /// The build should be deleted. + /// + [EnumMember] + Details = 8, + + /// + /// Published symbols should be deleted. + /// + [EnumMember] + Symbols = 16, + + /// + /// All data should be deleted. + /// + [EnumMember] + All = 31, + } + + [DataContract] + public enum ScheduleDays + { + /// + /// Do not run. + /// + [EnumMember] + None = 0, + + /// + /// Run on Monday. + /// + [EnumMember] + Monday = 1, + + /// + /// Run on Tuesday. + /// + [EnumMember] + Tuesday = 2, + + /// + /// Run on Wednesday. + /// + [EnumMember] + Wednesday = 4, + + /// + /// Run on Thursday. + /// + [EnumMember] + Thursday = 8, + + /// + /// Run on Friday. + /// + [EnumMember] + Friday = 16, + + /// + /// Run on Saturday. + /// + [EnumMember] + Saturday = 32, + + /// + /// Run on Sunday. + /// + [EnumMember] + Sunday = 64, + + /// + /// Run on all days of the week. + /// + [EnumMember] + All = Monday | Tuesday | Wednesday | Thursday | Friday | Saturday | Sunday, + } + + [DataContract] + public enum WorkspaceMappingType + { + /// + /// The path is mapped in the workspace. + /// + [EnumMember] + Map = 0, + + /// + /// The path is cloaked in the workspace. + /// + [EnumMember] + Cloak = 1, + } + + [DataContract] + public enum ProcessTemplateType + { + /// + /// Indicates a custom template. + /// + [EnumMember] + Custom = 0, + + /// + /// Indicates a default template. + /// + [EnumMember] + Default = 1, + + /// + /// Indicates an upgrade template. 
+ /// + [EnumMember] + Upgrade = 2, + } + + [DataContract] + public enum ValidationResult + { + [EnumMember] + OK, + [EnumMember] + Warning, + [EnumMember] + Error + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildArtifactAddedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildArtifactAddedEvent.cs new file mode 100644 index 00000000000..a1392decdba --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildArtifactAddedEvent.cs @@ -0,0 +1,25 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + [Obsolete("Use BuildEvent instead.")] + public class BuildArtifactAddedEvent : BuildUpdatedEvent + { + public BuildArtifactAddedEvent( + Build build, + BuildArtifact artifact) + : base(build) + { + this.Artifact = artifact; + } + + [DataMember(IsRequired = true)] + public BuildArtifact Artifact + { + get; + private set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildChangesCalculatedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildChangesCalculatedEvent.cs new file mode 100644 index 00000000000..bddcda88faf --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildChangesCalculatedEvent.cs @@ -0,0 +1,26 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + [Obsolete("Use BuildEvent instead.")] + public class BuildChangesCalculatedEvent : BuildUpdatedEvent + { + public BuildChangesCalculatedEvent( + Build build, + List changes) + : base(build) + { + this.Changes = changes; + } + + [DataMember(IsRequired = true)] + public List Changes + { + get; + private set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildDefinitionChangedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildDefinitionChangedEvent.cs new file mode 100644 index 00000000000..fe750169d26 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildDefinitionChangedEvent.cs @@ -0,0 +1,34 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + [Obsolete("No longer used")] + public class BuildDefinitionChangedEvent + { + public BuildDefinitionChangedEvent() + { + } + + public BuildDefinitionChangedEvent(AuditAction changeType, BuildDefinition definition) + { + Definition = definition; + ChangeType = changeType; + } + + [DataMember] + public BuildDefinition Definition + { + get; + set; + } + + [DataMember] + public AuditAction ChangeType + { + get; + set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildDefinitionChangingEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildDefinitionChangingEvent.cs new file mode 100644 index 00000000000..961b6406b74 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildDefinitionChangingEvent.cs @@ -0,0 +1,42 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + [Obsolete("No longer used")] + public class BuildDefinitionChangingEvent + { + public BuildDefinitionChangingEvent() + { + } + + public BuildDefinitionChangingEvent(AuditAction changeType, BuildDefinition originalDefinition, BuildDefinition newDefinition) + { + OriginalDefinition = originalDefinition; + NewDefinition = newDefinition; + ChangeType = changeType; + } + + [DataMember] + public BuildDefinition OriginalDefinition + { + get; + set; + } + + [DataMember] + public BuildDefinition NewDefinition + { + get; + set; + } + + [DataMember] + public AuditAction ChangeType + { + get; + set; + } + } +} diff --git 
a/src/Sdk/BuildWebApi/Api/Events/BuildDeletedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildDeletedEvent.cs new file mode 100644 index 00000000000..1ac20968b76 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildDeletedEvent.cs @@ -0,0 +1,21 @@ +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + public class BuildDeletedEvent : RealtimeBuildEvent + { + public BuildDeletedEvent(Build deletedBuild) + : base(deletedBuild.Id) + { + this.Build = deletedBuild; + } + + [DataMember(IsRequired = true)] + public Build Build + { + get; + private set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildDestroyedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildDestroyedEvent.cs new file mode 100644 index 00000000000..c124dd7a179 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildDestroyedEvent.cs @@ -0,0 +1,23 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Events +{ + [Obsolete("No longer used.")] + [DataContract] + public class BuildDestroyedEvent : RealtimeBuildEvent + { + public BuildDestroyedEvent(Build destroyedBuild) + : base(destroyedBuild.Id) + { + this.Build = destroyedBuild; + } + + [DataMember(IsRequired = true)] + public Build Build + { + get; + private set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildEvent.cs new file mode 100644 index 00000000000..717209f47c4 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildEvent.cs @@ -0,0 +1,62 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [DataContract] + public sealed class BuildEvent + { + public BuildEvent( + String identifier) + : this(identifier, (String)null) + { + } + + public BuildEvent( + String identifier, + String data) + : this(identifier, new[] { data }) + { + Identifier = identifier; + if (data != null) + { + Data.Add(data); + } + } + + public BuildEvent( + String identifier, + IList data) + { + Identifier = identifier; + if (data != null && data.Count > 0) + { + Data.AddRange(data); + } + } + + [DataMember] + public String Identifier + { + get; + private set; + } + + public IList Data + { + get + { + if (m_data == null) + { + m_data = new List(); + } + return m_data; + } + } + + [DataMember(Name = "Data", EmitDefaultValue = false)] + private List m_data; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildEvents.cs b/src/Sdk/BuildWebApi/Api/Events/BuildEvents.cs new file mode 100644 index 00000000000..da78458367b --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildEvents.cs @@ -0,0 +1,17 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [GenerateAllConstants] + public static class BuildEvents + { + public const String ArtifactAdded = "artifactAdded"; + public const String BuildUpdated = "buildUpdated"; + public const String ChangesCalculated = "changesCalculated"; + public const String ConsoleLinesReceived = "consoleLinesReceived"; + public const String StagesUpdated = "stagesUpdated"; + public const String TagsAdded = "tagsAdded"; + public const String TimelineRecordsUpdated = "timelineRecordsUpdated"; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildPollingSummaryEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildPollingSummaryEvent.cs new file mode 100644 index 00000000000..be9ddd1b59c --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildPollingSummaryEvent.cs @@ -0,0 +1,25 @@ +using System; +using 
System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + [ServiceEventObject] + [Obsolete("No longer used")] + public class BuildPollingSummaryEvent + { + public BuildPollingSummaryEvent(Dictionary ciData) + { + m_ciData = ciData; + } + + public Dictionary CIData + { + get { return m_ciData; } + } + + private Dictionary m_ciData; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildQueuedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildQueuedEvent.cs new file mode 100644 index 00000000000..4f474f67915 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildQueuedEvent.cs @@ -0,0 +1,15 @@ +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + [ServiceEventObject] + public class BuildQueuedEvent : BuildUpdatedEvent + { + public BuildQueuedEvent(Build build) + : base(build) + { + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildStartedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildStartedEvent.cs new file mode 100644 index 00000000000..b97c810a7d9 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildStartedEvent.cs @@ -0,0 +1,15 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + [Obsolete("No longer used.")] + public class BuildStartedEvent : BuildUpdatedEvent + { + public BuildStartedEvent(Build build) + : base(build) + { + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildTagsAddedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildTagsAddedEvent.cs new file mode 100644 index 00000000000..c3613d0ba99 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildTagsAddedEvent.cs @@ -0,0 +1,34 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + [ServiceEventObject] + public class BuildTagsAddedEvent : BuildUpdatedEvent + { + public BuildTagsAddedEvent(Build build, List allTags, List newTags) + : base(build) + { + this.AllTags = allTags; + this.NewTags = newTags; + } + + [DataMember(IsRequired = true)] + public List AllTags + { + get; + private set; + } + + [DataMember(IsRequired = true)] + public List NewTags + { + get; + private set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildUpdatedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildUpdatedEvent.cs new file mode 100644 index 00000000000..1cae031b656 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildUpdatedEvent.cs @@ -0,0 +1,21 @@ +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + public class BuildUpdatedEvent : RealtimeBuildEvent + { + public BuildUpdatedEvent(Build build) + : base(build.Id) + { + this.Build = build; + } + + [DataMember(IsRequired = true)] + public Build Build + { + get; + private set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/BuildsDeletedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/BuildsDeletedEvent.cs new file mode 100644 index 00000000000..f4958f1d5ce --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/BuildsDeletedEvent.cs @@ -0,0 +1,62 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + [ServiceEventObject] + public class BuildsDeletedEvent : BuildsDeletedEvent1 + { + } + + // trying this out to avoid 
future compat issues. + // the idea is to keep this around when we create BuildsDeletedEvent2, and make BuildsDeletedEvent inherit from BuildsDeletedEvent2 instead of BuildsDeletedEvent1 + // then, when we publish to service bus, we can send BuildsDeletedEvent1 explicitly along with BuildsDeletedEvent + [DataContract] + [ServiceEventObject] + [EditorBrowsable(EditorBrowsableState.Never)] + public class BuildsDeletedEvent1 + { + /// + /// The ID of the project. + /// + [DataMember] + public Guid ProjectId + { + get; + set; + } + + /// + /// The ID of the definition. + /// + [DataMember] + public Int32 DefinitionId + { + get; + set; + } + + /// + /// The IDs of the builds that were deleted. + /// + public List BuildIds + { + get + { + if (m_buildIds == null) + { + m_buildIds = new List(); + } + + return m_buildIds; + } + } + + [DataMember(Name = "BuildIds")] + private List m_buildIds; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/ConsoleLogEvent.cs b/src/Sdk/BuildWebApi/Api/Events/ConsoleLogEvent.cs new file mode 100644 index 00000000000..8201e6936e5 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/ConsoleLogEvent.cs @@ -0,0 +1,69 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + public sealed class ConsoleLogEvent : RealtimeBuildEvent + { + public ConsoleLogEvent( + Int32 buildId, + Guid timelineId, + Guid jobTimelineRecordId, + IEnumerable lines) + : this(buildId, timelineId, jobTimelineRecordId, Guid.Empty, lines) + { + } + + public ConsoleLogEvent( + Int32 buildId, + Guid timelineId, + Guid jobTimelineRecordId, + Guid stepTimelineRecordId, + IEnumerable lines) + : base(buildId) + { + this.TimelineId = timelineId; + this.TimelineRecordId = jobTimelineRecordId; + this.StepRecordId = stepTimelineRecordId; + m_lines = new List(lines); + } + + [DataMember(IsRequired = true)] + public Guid TimelineId + { + get; + private set; + } + + [DataMember(IsRequired = true)] + public Guid TimelineRecordId + { + get; + private set; + } + + [DataMember(IsRequired = false)] + public Guid StepRecordId + { + get; + private set; + } + + public List Lines + { + get + { + if (m_lines == null) + { + m_lines = new List(); + } + return m_lines; + } + } + + [DataMember(Name = "Lines")] + private List m_lines; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/RealTimeBuildEvent.cs b/src/Sdk/BuildWebApi/Api/Events/RealTimeBuildEvent.cs new file mode 100644 index 00000000000..357ca3092fd --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/RealTimeBuildEvent.cs @@ -0,0 +1,21 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + public abstract class RealtimeBuildEvent + { + protected RealtimeBuildEvent(Int32 buildId) + { + this.BuildId = buildId; + } + + [DataMember(IsRequired = true)] + public Int32 BuildId + { + get; + private set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/SyncBuildCompletedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/SyncBuildCompletedEvent.cs new file mode 100644 index 00000000000..63483bf069d --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/SyncBuildCompletedEvent.cs @@ -0,0 +1,19 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + [ServiceEventObject] + [EditorBrowsable(EditorBrowsableState.Never)] + [Obsolete("No longer used.")] + public class SyncBuildCompletedEvent : BuildUpdatedEvent + { 
+ internal SyncBuildCompletedEvent(Build build) + : base(build) + { + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/SyncBuildStartedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/SyncBuildStartedEvent.cs new file mode 100644 index 00000000000..a5629dc0d09 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/SyncBuildStartedEvent.cs @@ -0,0 +1,19 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + [ServiceEventObject] + [EditorBrowsable(EditorBrowsableState.Never)] + [Obsolete("No longer used.")] + public class SyncBuildStartedEvent : BuildUpdatedEvent + { + internal SyncBuildStartedEvent(Build build) + : base(build) + { + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/TaskOrchestrationPlanGroupsStartedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/TaskOrchestrationPlanGroupsStartedEvent.cs new file mode 100644 index 00000000000..7fd4b6d6cc6 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/TaskOrchestrationPlanGroupsStartedEvent.cs @@ -0,0 +1,17 @@ +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + public class TaskOrchestrationPlanGroupsStartedEvent + { + public TaskOrchestrationPlanGroupsStartedEvent(IList planGroups) + { + this.PlanGroups = planGroups; + } + + [DataMember(IsRequired = true)] + public IList PlanGroups { get; private set; } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Events/TimelineRecordsUpdatedEvent.cs b/src/Sdk/BuildWebApi/Api/Events/TimelineRecordsUpdatedEvent.cs new file mode 100644 index 00000000000..605a3cab530 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Events/TimelineRecordsUpdatedEvent.cs @@ -0,0 +1,25 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Build.WebApi.Events +{ + [DataContract] + public class TimelineRecordsUpdatedEvent : RealtimeBuildEvent + { + public TimelineRecordsUpdatedEvent( + Int32 buildId, + IEnumerable records) + : base(buildId) + { + this.TimelineRecords = records; + } + + [DataMember(IsRequired = true)] + public IEnumerable TimelineRecords + { + get; + private set; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Exceptions.cs b/src/Sdk/BuildWebApi/Api/Exceptions.cs new file mode 100644 index 00000000000..2aab92154b3 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Exceptions.cs @@ -0,0 +1,1632 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [Serializable] + [ExceptionMapping("0.0", "3.0", "AccessDeniedException", "GitHub.Build.WebApi.AccessDeniedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class AccessDeniedException : VssServiceException + { + public AccessDeniedException(String message) + : base(message) + { + } + public AccessDeniedException(String message, Exception ex) + : base(message, ex) + { + } + + protected AccessDeniedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "BuildException", "GitHub.Build.WebApi.BuildException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class BuildException : VssServiceException + { + public BuildException(String message) + : base(message) + { + } + + public BuildException(String message, Exception ex) + : 
base(message, ex) + { + } + + protected BuildException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "AgentsNotFoundException", "GitHub.Build.WebApi.AgentsNotFoundException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class AgentsNotFoundException : BuildException + { + public AgentsNotFoundException(String message) + : base(message) + { + } + + public AgentsNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected AgentsNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "ArtifactExistsException", "GitHub.Build.WebApi.ArtifactExistsException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ArtifactExistsException : BuildException + { + public ArtifactExistsException(String message) + : base(message) + { + } + + public ArtifactExistsException(String message, Exception ex) + : base(message, ex) + { + } + + protected ArtifactExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "ArtifactNotFoundException", "GitHub.Build.WebApi.ArtifactNotFoundException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ArtifactNotFoundException : BuildException + { + public ArtifactNotFoundException(String message) + : base(message) + { + } + + public ArtifactNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected ArtifactNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "ArtifactTypeNotSupportedException", "GitHub.Build.WebApi.ArtifactTypeNotSupportedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ArtifactTypeNotSupportedException : BuildException + { + public ArtifactTypeNotSupportedException(String message) + : base(message) + { + } + + public ArtifactTypeNotSupportedException(String message, Exception ex) + : base(message, ex) + { + } + + protected ArtifactTypeNotSupportedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "BranchNotFoundException", "GitHub.Build.WebApi.BranchNotFoundException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class BranchNotFoundException : BuildException + { + public BranchNotFoundException(String message) + : base(message) + { + } + + public BranchNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected BranchNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "BuildControllerNotFoundException", "GitHub.Build.WebApi.BuildControllerNotFoundException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class BuildControllerNotFoundException : BuildException + { + public BuildControllerNotFoundException(String message) + : base(message) + { + } + + public BuildControllerNotFoundException(String message, Exception ex) + : base(message, 
ex) + { + } + + protected BuildControllerNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class BuildEventNotFoundException : BuildException + { + public BuildEventNotFoundException(String message) + : base(message) + { + } + + public BuildEventNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected BuildEventNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class BuildEventPermissionException : BuildException + { + public BuildEventPermissionException(String message) + : base(message) + { + } + + public BuildEventPermissionException(String message, Exception ex) + : base(message, ex) + { + } + + protected BuildEventPermissionException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "BuildExistsException", "GitHub.Build.WebApi.BuildExistsException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class BuildExistsException : BuildException + { + public BuildExistsException(String message) + : base(message) + { + } + + public BuildExistsException(String message, Exception ex) + : base(message, ex) + { + } + + protected BuildExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "BuildLogNotFoundException", "GitHub.Build.WebApi.BuildLogNotFoundException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class BuildLogNotFoundException : BuildException + { + public BuildLogNotFoundException(String message) + : base(message) + { + } + + public BuildLogNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected BuildLogNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "BuildNotFoundException", "GitHub.Build.WebApi.BuildNotFoundException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class BuildNotFoundException : BuildException + { + public BuildNotFoundException(String message) + : base(message) + { + } + + public BuildNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected BuildNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "BuildNumberFormatException", "GitHub.Build.WebApi.BuildNumberFormatException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class BuildNumberFormatException : BuildException + { + public BuildNumberFormatException(String message) + : base(message) + { + } + + public BuildNumberFormatException(String message, Exception ex) + : base(message, ex) + { + } + + protected BuildNumberFormatException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "BuildOptionNotSupportedException", "GitHub.Build.WebApi.BuildOptionNotSupportedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class BuildOptionNotSupportedException : BuildException + { + public 
BuildOptionNotSupportedException(String message) + : base(message) + { + } + + public BuildOptionNotSupportedException(String message, Exception ex) + : base(message, ex) + { + } + + protected BuildOptionNotSupportedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "BuildRepositoryTypeNotSupportedException", "GitHub.Build.WebApi.BuildRepositoryTypeNotSupportedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class BuildRepositoryTypeNotSupportedException : BuildException + { + public BuildRepositoryTypeNotSupportedException(String message) + : base(message) + { + } + + public BuildRepositoryTypeNotSupportedException(String message, Exception ex) + : base(message, ex) + { + } + + protected BuildRepositoryTypeNotSupportedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "BuildRequestValidationFailedException", "GitHub.Build.WebApi.BuildRequestValidationFailedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class BuildRequestValidationFailedException : BuildException + { + public BuildRequestValidationFailedException(String message) + : base(message) + { + } + + public BuildRequestValidationFailedException(String message, Exception ex) + : base(message, ex) + { + } + + public BuildRequestValidationFailedException(String message, List validationResults) + : base(message) + { + ValidationResults.AddRange(validationResults); + } + + protected BuildRequestValidationFailedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + m_validationResults = (List)info.GetValue("ValidationResults", typeof(List)); + } + + public override void GetObjectData(SerializationInfo info, StreamingContext context) + { + base.GetObjectData(info, context); + info.AddValue("ValidationResults", ValidationResults); + } + + [DataMember(Name = "ValidationResults", EmitDefaultValue = false)] + public List ValidationResults + { + get + { + if (m_validationResults == null) + { + m_validationResults = new List(); + } + return m_validationResults; + } + private set + { + m_validationResults = value; + } + } + + private List m_validationResults; + } + + [Serializable] + [ExceptionMapping("0.0", "4.1", "BuildRequestValidationFailedException", "GitHub.Build.WebApi.BuildRequestValidationFailedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class BuildRequestValidationWarningException : BuildException + { + public BuildRequestValidationWarningException(String message) + : base(message) + { + } + + public BuildRequestValidationWarningException(String message, Exception ex) + : base(message, ex) + { + } + + public BuildRequestValidationWarningException(String message, List validationResults) + : base(message) + { + ValidationResults.AddRange(validationResults); + } + + protected BuildRequestValidationWarningException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + m_validationResults = (List)info.GetValue("ValidationResults", typeof(List)); + } + + public override void GetObjectData(SerializationInfo info, StreamingContext context) + { + base.GetObjectData(info, context); + info.AddValue("ValidationResults", ValidationResults); + } + + [DataMember(Name = "ValidationResults", EmitDefaultValue 
= false)] + public List ValidationResults + { + get + { + if (m_validationResults == null) + { + m_validationResults = new List(); + } + return m_validationResults; + } + private set + { + m_validationResults = value; + } + } + + private List m_validationResults; + } + + [Serializable] + public class BuildEventStatusInvalidChangeException : BuildException + { + public BuildEventStatusInvalidChangeException(String message) + : base(message) + { + } + + public BuildEventStatusInvalidChangeException(String message, Exception ex) + : base(message, ex) + { + } + + protected BuildEventStatusInvalidChangeException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "BuildStatusInvalidChangeException", "GitHub.Build.WebApi.BuildStatusInvalidChangeException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class BuildStatusInvalidChangeException : BuildException + { + public BuildStatusInvalidChangeException(String message) + : base(message) + { + } + + public BuildStatusInvalidChangeException(String message, Exception ex) + : base(message, ex) + { + } + + protected BuildStatusInvalidChangeException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "CannotDeleteRetainedBuildException", "GitHub.Build.WebApi.CannotDeleteRetainedBuildException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class CannotDeleteRetainedBuildException : BuildException + { + public CannotDeleteRetainedBuildException(String message) + : base(message) + { + } + + public CannotDeleteRetainedBuildException(String message, IReadOnlyList buildIds) + : base(message) + { + RetainedBuildIds = buildIds; + } + + public CannotDeleteRetainedBuildException(String message, Exception ex) + : base(message, ex) + { + } + + protected CannotDeleteRetainedBuildException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + [DataMember(EmitDefaultValue = false)] + public IReadOnlyList RetainedBuildIds { get; } + } + + [Serializable] + public class CouldNotRetrieveSourceVersionDisplayUrlException : BuildException + { + public CouldNotRetrieveSourceVersionDisplayUrlException(String message) + : base(message) + { + } + + public CouldNotRetrieveSourceVersionDisplayUrlException(String message, Exception ex) + : base(message, ex) + { + } + + protected CouldNotRetrieveSourceVersionDisplayUrlException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "DefinitionDisabledException", "GitHub.Build.WebApi.DefinitionDisabledException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DefinitionDisabledException : BuildException + { + public DefinitionDisabledException(String message) + : base(message) + { + } + + public DefinitionDisabledException(String message, Exception ex) + : base(message, ex) + { + } + + protected DefinitionDisabledException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "DefinitionExistsException", "GitHub.Build.WebApi.DefinitionExistsException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DefinitionExistsException : BuildException 
+ { + public DefinitionExistsException(String message) + : base(message) + { + } + + public DefinitionExistsException(String message, Exception ex) + : base(message, ex) + { + } + + protected DefinitionExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class FolderExistsException : BuildException + { + public FolderExistsException(String message) + : base(message) + { + } + + public FolderExistsException(String message, Exception ex) + : base(message, ex) + { + } + + protected FolderExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class FolderNotFoundException : BuildException + { + public FolderNotFoundException(String message) + : base(message) + { + } + + public FolderNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected FolderNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "DefinitionNotFoundException", "GitHub.Build.WebApi.DefinitionNotFoundException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DefinitionNotFoundException : BuildException + { + public DefinitionNotFoundException(String message) + : base(message) + { + } + + public DefinitionNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected DefinitionNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + + [Serializable] + [ExceptionMapping("0.0", "3.0", "DefinitionNotMatchedException", "GitHub.Build.WebApi.DefinitionNotMatchedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DefinitionNotMatchedException : BuildException + { + public DefinitionNotMatchedException(String message) + : base(message) + { + } + + public DefinitionNotMatchedException(String message, Exception ex) + : base(message, ex) + { + } + + protected DefinitionNotMatchedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "DefinitionTemplateExistsException", "GitHub.Build.WebApi.DefinitionTemplateExistsException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DefinitionTemplateExistsException : BuildException + { + public DefinitionTemplateExistsException(String message) + : base(message) + { + } + + public DefinitionTemplateExistsException(String message, Exception ex) + : base(message, ex) + { + } + + protected DefinitionTemplateExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "DefinitionTemplateNotFoundException", "GitHub.Build.WebApi.DefinitionTemplateNotFoundException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DefinitionTemplateNotFoundException : BuildException + { + public DefinitionTemplateNotFoundException(String message) + : base(message) + { + } + + public DefinitionTemplateNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected DefinitionTemplateNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + 
[ExceptionMapping("0.0", "3.0", "DefinitionTypeNotSupportedException", "GitHub.Build.WebApi.DefinitionTypeNotSupportedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DefinitionTypeNotSupportedException : BuildException + { + public DefinitionTypeNotSupportedException(String message) + : base(message) + { + } + + public DefinitionTypeNotSupportedException(String message, Exception ex) + : base(message, ex) + { + } + + protected DefinitionTypeNotSupportedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "DuplicateBuildSpecException", "GitHub.Build.WebApi.DuplicateBuildSpecException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DuplicateBuildSpecException : BuildException + { + public DuplicateBuildSpecException(String message) + : base(message) + { + } + + public DuplicateBuildSpecException(String message, Exception ex) + : base(message, ex) + { + } + + protected DuplicateBuildSpecException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class EndpointAccessDeniedException : BuildException + { + public EndpointAccessDeniedException(String message) + : base(message) + { + } + + public EndpointAccessDeniedException(String message, Exception ex) + : base(message, ex) + { + } + + protected EndpointAccessDeniedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class ExternalSourceProviderException : BuildException + { + public ExternalSourceProviderException(String message) + : base(message) + { + } + + public ExternalSourceProviderException(String message, Exception ex) + : base(message, ex) + { + } + + protected ExternalSourceProviderException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class SecureFileAccessDeniedException : BuildException + { + public SecureFileAccessDeniedException(String message) + : base(message) + { + } + + public SecureFileAccessDeniedException(String message, Exception ex) + : base(message, ex) + { + } + + protected SecureFileAccessDeniedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidArtifactDataException", "GitHub.Build.WebApi.InvalidArtifactDataException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidArtifactDataException : BuildException + { + public InvalidArtifactDataException(String message) + : base(message) + { + } + + public InvalidArtifactDataException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidArtifactDataException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidBuildException", "GitHub.Build.WebApi.InvalidBuildException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidBuildException : BuildException + { + public InvalidBuildException(String message) + : base(message) + { + } + + public InvalidBuildException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidBuildException(SerializationInfo info, StreamingContext context) + 
: base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidBuildQueryException", "GitHub.Build.WebApi.InvalidBuildQueryException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidBuildQueryException : BuildException + { + public InvalidBuildQueryException(String message) + : base(message) + { + } + + public InvalidBuildQueryException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidBuildQueryException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class InvalidContinuationTokenException : BuildException + { + public InvalidContinuationTokenException(String message) + : base(message) + { + } + + public InvalidContinuationTokenException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidContinuationTokenException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class InvalidDefinitionException : BuildException + { + public InvalidDefinitionException(String message) + : base(message) + { + } + + public InvalidDefinitionException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidDefinitionException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class CannotDeleteDefinitionWithRetainedBuildsException : BuildException + { + public CannotDeleteDefinitionWithRetainedBuildsException(String message) + : base(message) + { + } + + public CannotDeleteDefinitionWithRetainedBuildsException(String message, Exception ex) + : base(message, ex) + { + } + + protected CannotDeleteDefinitionWithRetainedBuildsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class CannotRestoreDeletedDraftWithoutRestoringParentException : BuildException + { + public CannotRestoreDeletedDraftWithoutRestoringParentException(String message) + : base(message) + { + } + + public CannotRestoreDeletedDraftWithoutRestoringParentException(String message, Exception ex) + : base(message, ex) + { + } + + protected CannotRestoreDeletedDraftWithoutRestoringParentException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class InvalidFolderException : BuildException + { + public InvalidFolderException(String message) + : base(message) + { + } + + public InvalidFolderException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidFolderException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidDefinitionQueryException", "GitHub.Build.WebApi.InvalidDefinitionQueryException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidDefinitionQueryException : BuildException + { + public InvalidDefinitionQueryException(String message) + : base(message) + { + } + + public InvalidDefinitionQueryException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidDefinitionQueryException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class InvalidFolderQueryException : BuildException + { + public InvalidFolderQueryException(String message) + : base(message) + { + } 
+ + public InvalidFolderQueryException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidFolderQueryException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class InvalidEndpointUrlException : BuildException + { + public InvalidEndpointUrlException(String message) + : base(message) + { + } + + public InvalidEndpointUrlException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidEndpointUrlException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidLogLocationException", "GitHub.Build.WebApi.InvalidLogLocationException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidLogLocationException : BuildException + { + public InvalidLogLocationException(String message) + : base(message) + { + } + + public InvalidLogLocationException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidLogLocationException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidProjectException", "GitHub.Build.WebApi.InvalidProjectException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidProjectException : BuildException + { + public InvalidProjectException(String message) + : base(message) + { + } + + public InvalidProjectException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidProjectException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidSourceLabelFormatException", "GitHub.Build.WebApi.InvalidSourceLabelFormatException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidSourceLabelFormatException : BuildException + { + public InvalidSourceLabelFormatException(String message) + : base(message) + { + } + + public InvalidSourceLabelFormatException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidSourceLabelFormatException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidTemplateException ", "GitHub.Build.WebApi.InvalidTemplateException , GitHub.Build.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class InvalidTemplateException : BuildException + { + public InvalidTemplateException(String message) + : base(message) + { + } + + public InvalidTemplateException(String message, Exception innerException) + : base(message, innerException) + { + } + + private InvalidTemplateException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "MetaTaskDefinitionMissingException", "GitHub.Build.WebApi.MetaTaskDefinitionMissingException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class MetaTaskDefinitionMissingException : BuildException + { + public MetaTaskDefinitionMissingException(String message) + : base(message) + { + } + + public MetaTaskDefinitionMissingException(String message, Exception ex) + : base(message, ex) + { + } + + protected 
MetaTaskDefinitionMissingException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class MissingEndpointInformationException : BuildException + { + public MissingEndpointInformationException(String message) + : base(message) + { + } + + public MissingEndpointInformationException(String message, Exception ex) + : base(message, ex) + { + } + + protected MissingEndpointInformationException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class MissingRepositoryException : BuildException + { + public MissingRepositoryException(String message) + : base(message) + { + } + + public MissingRepositoryException(String message, Exception ex) + : base(message, ex) + { + } + protected MissingRepositoryException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class MissingTasksForDefinition : BuildException + { + public MissingTasksForDefinition(String message) + : base(message) + { + } + + public MissingTasksForDefinition(String message, Exception ex) + : base(message, ex) + { + } + protected MissingTasksForDefinition(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "NotSupportedOnXamlBuildException", "GitHub.Build.WebApi.NotSupportedOnXamlBuildException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class NotSupportedOnXamlBuildException : BuildException + { + public NotSupportedOnXamlBuildException(String message) + : base(message) + { + } + + public NotSupportedOnXamlBuildException(String message, Exception ex) + : base(message, ex) + { + } + + protected NotSupportedOnXamlBuildException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class OrchestrationTypeNotSupportedException : BuildException + { + public OrchestrationTypeNotSupportedException(String message) + : base(message) + { + } + + public OrchestrationTypeNotSupportedException(String message, Exception ex) + : base(message, ex) + { + } + protected OrchestrationTypeNotSupportedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "ProcessTemplateDeletedException", "GitHub.Build.WebApi.ProcessTemplateDeletedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ProcessTemplateDeletedException : BuildException + { + public ProcessTemplateDeletedException(String message) + : base(message) + { + } + + public ProcessTemplateDeletedException(String message, Exception ex) + : base(message, ex) + { + } + + protected ProcessTemplateDeletedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "ProcessTemplateNotFoundException", "GitHub.Build.WebApi.ProcessTemplateNotFoundException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ProcessTemplateNotFoundException : BuildException + { + public ProcessTemplateNotFoundException(String message) + : base(message) + { + } + + public ProcessTemplateNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected ProcessTemplateNotFoundException(SerializationInfo info, 
StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "ProjectConflictException", "GitHub.Build.WebApi.ProjectConflictException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ProjectConflictException : BuildException + { + public ProjectConflictException(String message) + : base(message) + { + } + + public ProjectConflictException(String message, Exception ex) + : base(message, ex) + { + } + + protected ProjectConflictException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "QueueExistsException", "GitHub.Build.WebApi.QueueExistsException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class QueueExistsException : BuildException + { + public QueueExistsException(String message) + : base(message) + { + } + + public QueueExistsException(String message, Exception ex) + : base(message, ex) + { + } + + protected QueueExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "QueueNotFoundException", "GitHub.Build.WebApi.QueueNotFoundException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class QueueNotFoundException : BuildException + { + public QueueNotFoundException(String message) + : base(message) + { + } + + public QueueNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected QueueNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "RecreatingSubscriptionFailedException", "GitHub.Build.WebApi.RecreatingSubscriptionFailedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class RecreatingSubscriptionFailedException : BuildException + { + public RecreatingSubscriptionFailedException(String message) + : base(message) + { + } + + public RecreatingSubscriptionFailedException(String message, Exception ex) + : base(message, ex) + { + } + + protected RecreatingSubscriptionFailedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "ReportFormatTypeNotSupportedException", "GitHub.Build.WebApi.ReportFormatTypeNotSupportedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ReportFormatTypeNotSupportedException : BuildException + { + public ReportFormatTypeNotSupportedException(String message) + : base(message) + { + } + + public ReportFormatTypeNotSupportedException(String message, Exception ex) + : base(message, ex) + { + } + + protected ReportFormatTypeNotSupportedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "ReportStreamNotSupportedException", "GitHub.Build.WebApi.ReportStreamNotSupportedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ReportStreamNotSupportedException : BuildException + { + public ReportStreamNotSupportedException(String message) + : base(message) + { + } + + public ReportStreamNotSupportedException(String message, Exception ex) + : 
base(message, ex) + { + } + + protected ReportStreamNotSupportedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "RepositoryInformationInvalidException", "GitHub.Build.WebApi.RepositoryInformationInvalidException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class RepositoryInformationInvalidException : BuildException + { + public RepositoryInformationInvalidException(String message) + : base(message) + { + } + + public RepositoryInformationInvalidException(String message, Exception ex) + : base(message, ex) + { + } + + protected RepositoryInformationInvalidException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class RequestContentException : BuildException + { + public RequestContentException(String message) + : base(message) + { + } + + public RequestContentException(String message, Exception ex) + : base(message, ex) + { + } + + protected RequestContentException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "RouteIdConflictException", "GitHub.Build.WebApi.RouteIdConflictException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class RouteIdConflictException : BuildException + { + public RouteIdConflictException(String message) + : base(message) + { + } + + public RouteIdConflictException(String message, Exception ex) + : base(message, ex) + { + } + + protected RouteIdConflictException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TimelineNotFoundException", "GitHub.Build.WebApi.TimelineNotFoundException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class TimelineNotFoundException : BuildException + { + public TimelineNotFoundException(String message) + : base(message) + { + } + + public TimelineNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected TimelineNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "VariableNameIsReservedException", "GitHub.Build.WebApi.VariableNameIsReservedException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VariableNameIsReservedException : BuildException + { + public VariableNameIsReservedException(String message) + : base(message) + { + } + + public VariableNameIsReservedException(String message, Exception ex) + : base(message, ex) + { + } + + protected VariableNameIsReservedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class VariableGroupsAccessDeniedException : BuildException + { + public VariableGroupsAccessDeniedException(String message) + : base(message) + { + } + + public VariableGroupsAccessDeniedException(String message, Exception ex) + : base(message, ex) + { + } + + protected VariableGroupsAccessDeniedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class MetricAggregationTypeNotSupportedException : BuildException + { + public MetricAggregationTypeNotSupportedException(String 
message) + : base(message) + { + } + + public MetricAggregationTypeNotSupportedException(String message, Exception ex) + : base(message, ex) + { + } + + protected MetricAggregationTypeNotSupportedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "DefinitionTriggerAlreadyExistsException", "GitHub.Build.WebApi.DefinitionTriggerAlreadyExistsException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DefinitionTriggerAlreadyExistsException : BuildException + { + public DefinitionTriggerAlreadyExistsException(String message) + : base(message) + { + } + + public DefinitionTriggerAlreadyExistsException(String message, Exception ex) + : base(message, ex) + { + } + + protected DefinitionTriggerAlreadyExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidDefinitionInTriggerSourceException", "GitHub.Build.WebApi.InvalidDefinitionInTriggerSourceException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidDefinitionInTriggerSourceException : BuildException + { + public InvalidDefinitionInTriggerSourceException(String message) + : base(message) + { + } + + public InvalidDefinitionInTriggerSourceException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidDefinitionInTriggerSourceException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "CycleDetectedInProvidedBuildCompletionTriggersException", "GitHub.Build.WebApi.CycleDetectedInProvidedBuildCompletionTriggersException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class CycleDetectedInProvidedBuildCompletionTriggersException : BuildException + { + public CycleDetectedInProvidedBuildCompletionTriggersException(String message) + : base(message) + { + } + + public CycleDetectedInProvidedBuildCompletionTriggersException(String message, Exception ex) + : base(message, ex) + { + } + + protected CycleDetectedInProvidedBuildCompletionTriggersException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + + [Serializable] + [ExceptionMapping("0.0", "3.0", "UnsupportedBuildCompletionTriggerChainException", "GitHub.Build.WebApi.UnsupportedBuildCompletionTriggerChainException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class UnsupportedBuildCompletionTriggerChainException : BuildException + { + public UnsupportedBuildCompletionTriggerChainException(String message) + : base(message) + { + } + + public UnsupportedBuildCompletionTriggerChainException(String message, Exception ex) + : base(message, ex) + { + } + + protected UnsupportedBuildCompletionTriggerChainException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "CannotUpdateTriggeredByBuildException", "GitHub.Build.WebApi.CannotUpdateTriggeredByBuildException, GitHub.Build2.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class CannotUpdateTriggeredByBuildException : BuildException + { + public CannotUpdateTriggeredByBuildException(String message) + : base(message) + { + } + + public 
CannotUpdateTriggeredByBuildException(String message, Exception ex) + : base(message, ex) + { + } + + protected CannotUpdateTriggeredByBuildException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class InvalidGitVersionSpec : BuildException + { + public InvalidGitVersionSpec(String message) + : base(message) + { + } + + public InvalidGitVersionSpec(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidGitVersionSpec(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class AmbiguousDefinitionNameException : BuildException + { + public AmbiguousDefinitionNameException(String message) + : base(message) + { + } + + public AmbiguousDefinitionNameException(String message, Exception ex) + : base(message, ex) + { + } + + public AmbiguousDefinitionNameException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class DataProviderException : BuildException + { + public DataProviderException(String message) + : base(message) + { + } + + public DataProviderException(String message, Exception ex) + : base(message, ex) + { + } + + protected DataProviderException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Generated/BuildHttpClientBase.cs b/src/Sdk/BuildWebApi/Api/Generated/BuildHttpClientBase.cs new file mode 100644 index 00000000000..8b6de9580c5 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Generated/BuildHttpClientBase.cs @@ -0,0 +1,5744 @@ +/* + * --------------------------------------------------------- + * Copyright(C) Microsoft Corporation. All rights reserved. 
+ * --------------------------------------------------------- + * + * --------------------------------------------------------- + * Generated file, DO NOT EDIT + * --------------------------------------------------------- + * + * See following wiki page for instructions on how to regenerate: + * https://aka.ms/azure-devops-client-generation + * + * Configuration file: + * tfs\client\build2\api\clientgeneratorconfigs\genclient.json + */ + +using System; +using System.Collections.Generic; +using System.Collections.Specialized; +using System.Globalization; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Formatting; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Patch.Json; + +namespace GitHub.Build.WebApi +{ + [ResourceArea(BuildResourceIds.AreaId)] + public abstract class BuildHttpClientBase : BuildHttpClientCompatBase + { + public BuildHttpClientBase(Uri baseUrl, VssCredentials credentials) + : base(baseUrl, credentials) + { + } + + public BuildHttpClientBase(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings) + : base(baseUrl, credentials, settings) + { + } + + public BuildHttpClientBase(Uri baseUrl, VssCredentials credentials, params DelegatingHandler[] handlers) + : base(baseUrl, credentials, handlers) + { + } + + public BuildHttpClientBase(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings, params DelegatingHandler[] handlers) + : base(baseUrl, credentials, settings, handlers) + { + } + + public BuildHttpClientBase(Uri baseUrl, HttpMessageHandler pipeline, bool disposeHandler) + : base(baseUrl, pipeline, disposeHandler) + { + } + + /// + /// [Preview API] Associates an artifact with a build. + /// + /// The artifact. + /// Project ID or project name + /// The ID of the build. + /// + /// The cancellation token to cancel operation. + public virtual Task CreateArtifactAsync( + BuildArtifact artifact, + string project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("1db06c96-014e-44e1-ac91-90b2d4b3e984"); + object routeValues = new { project = project, buildId = buildId }; + HttpContent content = new ObjectContent(artifact, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Associates an artifact with a build. + /// + /// The artifact. + /// Project ID + /// The ID of the build. + /// + /// The cancellation token to cancel operation. 
+        public virtual Task<BuildArtifact> CreateArtifactAsync(
+            BuildArtifact artifact,
+            Guid project,
+            int buildId,
+            object userState = null,
+            CancellationToken cancellationToken = default)
+        {
+            HttpMethod httpMethod = new HttpMethod("POST");
+            Guid locationId = new Guid("1db06c96-014e-44e1-ac91-90b2d4b3e984");
+            object routeValues = new { project = project, buildId = buildId };
+            HttpContent content = new ObjectContent<BuildArtifact>(artifact, new VssJsonMediaTypeFormatter(true));
+
+            return SendAsync<BuildArtifact>(
+                httpMethod,
+                locationId,
+                routeValues: routeValues,
+                version: new ApiResourceVersion(5.2, 5),
+                userState: userState,
+                cancellationToken: cancellationToken,
+                content: content);
+        }
+
+        /// <summary>
+        /// [Preview API] Gets a specific artifact for a build.
+        /// </summary>
+        /// <param name="project">Project ID or project name</param>
+        /// <param name="buildId">The ID of the build.</param>
+        /// <param name="artifactName">The name of the artifact.</param>
+        /// <param name="userState"></param>
+        /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
+        public virtual Task<BuildArtifact> GetArtifactAsync(
+            string project,
+            int buildId,
+            string artifactName,
+            object userState = null,
+            CancellationToken cancellationToken = default)
+        {
+            HttpMethod httpMethod = new HttpMethod("GET");
+            Guid locationId = new Guid("1db06c96-014e-44e1-ac91-90b2d4b3e984");
+            object routeValues = new { project = project, buildId = buildId };
+
+            List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
+            queryParams.Add("artifactName", artifactName);
+
+            return SendAsync<BuildArtifact>(
+                httpMethod,
+                locationId,
+                routeValues: routeValues,
+                version: new ApiResourceVersion(5.2, 5),
+                queryParameters: queryParams,
+                userState: userState,
+                cancellationToken: cancellationToken);
+        }
+
+        /// <summary>
+        /// [Preview API] Gets a specific artifact for a build.
+        /// </summary>
+        /// <param name="project">Project ID</param>
+        /// <param name="buildId">The ID of the build.</param>
+        /// <param name="artifactName">The name of the artifact.</param>
+        /// <param name="userState"></param>
+        /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
+        public virtual Task<BuildArtifact> GetArtifactAsync(
+            Guid project,
+            int buildId,
+            string artifactName,
+            object userState = null,
+            CancellationToken cancellationToken = default)
+        {
+            HttpMethod httpMethod = new HttpMethod("GET");
+            Guid locationId = new Guid("1db06c96-014e-44e1-ac91-90b2d4b3e984");
+            object routeValues = new { project = project, buildId = buildId };
+
+            List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
+            queryParams.Add("artifactName", artifactName);
+
+            return SendAsync<BuildArtifact>(
+                httpMethod,
+                locationId,
+                routeValues: routeValues,
+                version: new ApiResourceVersion(5.2, 5),
+                queryParameters: queryParams,
+                userState: userState,
+                cancellationToken: cancellationToken);
+        }
+
+        /// <summary>
+        /// [Preview API] Gets a specific artifact for a build.
+        /// </summary>
+        /// <param name="project">Project ID or project name</param>
+        /// <param name="buildId">The ID of the build.</param>
+        /// <param name="artifactName">The name of the artifact.</param>
+        /// <param name="userState"></param>
+        /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
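+        // Usage sketch (illustrative only; assumes an async caller and a constructed client named
+        // "client"): downloading a build artifact as a zip with GetArtifactContentZipAsync, declared
+        // below, and saving it to disk. The artifact name "drop" is a placeholder.
+        //
+        //   using (var zip = await client.GetArtifactContentZipAsync(project, buildId, "drop"))
+        //   using (var file = System.IO.File.Create("drop.zip"))
+        //   {
+        //       await zip.CopyToAsync(file);
+        //   }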
+ public virtual async Task GetArtifactContentZipAsync( + string project, + int buildId, + string artifactName, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("1db06c96-014e-44e1-ac91-90b2d4b3e984"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + queryParams.Add("artifactName", artifactName); + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.5"), + queryParameters: queryParams, + mediaType: "application/zip", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets a specific artifact for a build. + /// + /// Project ID + /// The ID of the build. + /// The name of the artifact. + /// + /// The cancellation token to cancel operation. + public virtual async Task GetArtifactContentZipAsync( + Guid project, + int buildId, + string artifactName, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("1db06c96-014e-44e1-ac91-90b2d4b3e984"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + queryParams.Add("artifactName", artifactName); + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.5"), + queryParameters: queryParams, + mediaType: "application/zip", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets all artifacts for a build. + /// + /// Project ID or project name + /// The ID of the build. + /// + /// The cancellation token to cancel operation. 
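+        // Usage sketch (illustrative only; assumes an async caller and a constructed client named
+        // "client"): listing the artifacts attached to a build with GetArtifactsAsync, declared below.
+        // The Name property on BuildArtifact is assumed for the example.
+        //
+        //   var artifacts = await client.GetArtifactsAsync(project, buildId);
+        //   foreach (var artifact in artifacts)
+        //   {
+        //       Console.WriteLine(artifact.Name);
+        //   }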
+ public virtual Task> GetArtifactsAsync( + string project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("1db06c96-014e-44e1-ac91-90b2d4b3e984"); + object routeValues = new { project = project, buildId = buildId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets all artifacts for a build. + /// + /// Project ID + /// The ID of the build. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetArtifactsAsync( + Guid project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("1db06c96-014e-44e1-ac91-90b2d4b3e984"); + object routeValues = new { project = project, buildId = buildId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a file from the build. + /// + /// Project ID or project name + /// The ID of the build. + /// The name of the artifact. + /// The primary key for the file. + /// The name that the file will be set to. + /// + /// The cancellation token to cancel operation. + public virtual async Task GetFileAsync( + string project, + int buildId, + string artifactName, + string fileId, + string fileName, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("1db06c96-014e-44e1-ac91-90b2d4b3e984"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + queryParams.Add("artifactName", artifactName); + queryParams.Add("fileId", fileId); + queryParams.Add("fileName", fileName); + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.5"), + queryParameters: queryParams, + mediaType: "application/octet-stream", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets a file from the build. + /// + /// Project ID + /// The ID of the build. + /// The name of the artifact. + /// The primary key for the file. + /// The name that the file will be set to. + /// + /// The cancellation token to cancel operation. 
+ public virtual async Task GetFileAsync( + Guid project, + int buildId, + string artifactName, + string fileId, + string fileName, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("1db06c96-014e-44e1-ac91-90b2d4b3e984"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + queryParams.Add("artifactName", artifactName); + queryParams.Add("fileId", fileId); + queryParams.Add("fileName", fileName); + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.5"), + queryParameters: queryParams, + mediaType: "application/octet-stream", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets the list of attachments of a specific type that are associated with a build. + /// + /// Project ID or project name + /// The ID of the build. + /// The type of attachment. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAttachmentsAsync( + string project, + int buildId, + string type, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("f2192269-89fa-4f94-baf6-8fb128c55159"); + object routeValues = new { project = project, buildId = buildId, type = type }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the list of attachments of a specific type that are associated with a build. + /// + /// Project ID + /// The ID of the build. + /// The type of attachment. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAttachmentsAsync( + Guid project, + int buildId, + string type, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("f2192269-89fa-4f94-baf6-8fb128c55159"); + object routeValues = new { project = project, buildId = buildId, type = type }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a specific attachment. + /// + /// Project ID or project name + /// The ID of the build. + /// The ID of the timeline. + /// The ID of the timeline record. + /// The type of the attachment. + /// The name of the attachment. + /// + /// The cancellation token to cancel operation. 
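+        // Usage sketch (illustrative only; assumes an async caller and a constructed client named
+        // "client"): listing attachments of a given type with GetAttachmentsAsync, shown above. The
+        // attachment type string is a placeholder.
+        //
+        //   var attachments = await client.GetAttachmentsAsync(project, buildId, "myAttachmentType");
+        //   Console.WriteLine($"Found {attachments.Count} attachment(s)");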
+ public virtual async Task GetAttachmentAsync( + string project, + int buildId, + Guid timelineId, + Guid recordId, + string type, + string name, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("af5122d3-3438-485e-a25a-2dbbfde84ee6"); + object routeValues = new { project = project, buildId = buildId, timelineId = timelineId, recordId = recordId, type = type, name = name }; + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.2"), + mediaType: "application/octet-stream", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets a specific attachment. + /// + /// Project ID + /// The ID of the build. + /// The ID of the timeline. + /// The ID of the timeline record. + /// The type of the attachment. + /// The name of the attachment. + /// + /// The cancellation token to cancel operation. + public virtual async Task GetAttachmentAsync( + Guid project, + int buildId, + Guid timelineId, + Guid recordId, + string type, + string name, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("af5122d3-3438-485e-a25a-2dbbfde84ee6"); + object routeValues = new { project = project, buildId = buildId, timelineId = timelineId, recordId = recordId, type = type, name = name }; + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.2"), + mediaType: "application/octet-stream", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] + /// + /// + /// Project ID or project name + /// + /// The cancellation token to cancel operation. 
+ public virtual Task> AuthorizeProjectResourcesAsync( + IEnumerable resources, + string project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("398c85bc-81aa-4822-947c-a194a05f0fef"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent>(resources, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// Project ID + /// + /// The cancellation token to cancel operation. + public virtual Task> AuthorizeProjectResourcesAsync( + IEnumerable resources, + Guid project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("398c85bc-81aa-4822-947c-a194a05f0fef"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent>(resources, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetProjectResourcesAsync( + string project, + string type = null, + string id = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("398c85bc-81aa-4822-947c-a194a05f0fef"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (type != null) + { + queryParams.Add("type", type); + } + if (id != null) + { + queryParams.Add("id", id); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetProjectResourcesAsync( + Guid project, + string type = null, + string id = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("398c85bc-81aa-4822-947c-a194a05f0fef"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (type != null) + { + queryParams.Add("type", type); + } + if (id != null) + { + queryParams.Add("id", id); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a badge that indicates the status of the most recent build for a definition. Note that this API is deprecated. Prefer StatusBadgeController.GetStatusBadge. + /// + /// The project ID or name. + /// The ID of the definition. + /// The name of the branch. + /// + /// The cancellation token to cancel operation. + [Obsolete("This endpoint is deprecated. 
Please see the Build Status REST endpoint.")] + public virtual Task GetBadgeAsync( + Guid project, + int definitionId, + string branchName = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("de6a4df8-22cd-44ee-af2d-39f6aa7a4261"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of branches for the given source code repository. + /// + /// Project ID or project name + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// The vendor-specific identifier or the name of the repository to get branches. Can only be omitted for providers that do not support multiple repositories. + /// If supplied, the name of the branch to check for specifically. + /// + /// The cancellation token to cancel operation. + public virtual Task> ListBranchesAsync( + string project, + string providerName, + Guid? serviceEndpointId = null, + string repository = null, + string branchName = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("e05d4403-9b81-4244-8763-20fde28d1976"); + object routeValues = new { project = project, providerName = providerName }; + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of branches for the given source code repository. + /// + /// Project ID + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// The vendor-specific identifier or the name of the repository to get branches. Can only be omitted for providers that do not support multiple repositories. + /// If supplied, the name of the branch to check for specifically. + /// + /// The cancellation token to cancel operation. + public virtual Task> ListBranchesAsync( + Guid project, + string providerName, + Guid? 
serviceEndpointId = null, + string repository = null, + string branchName = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("e05d4403-9b81-4244-8763-20fde28d1976"); + object routeValues = new { project = project, providerName = providerName }; + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a badge that indicates the status of the most recent build for the specified branch. + /// + /// Project ID or project name + /// The repository type. + /// The repository ID. + /// The branch name. + /// + /// The cancellation token to cancel operation. + public virtual Task GetBuildBadgeAsync( + string project, + string repoType, + string repoId = null, + string branchName = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("21b3b9ce-fad5-4567-9ad0-80679794e003"); + object routeValues = new { project = project, repoType = repoType }; + + List> queryParams = new List>(); + if (repoId != null) + { + queryParams.Add("repoId", repoId); + } + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a badge that indicates the status of the most recent build for the specified branch. + /// + /// Project ID + /// The repository type. + /// The repository ID. + /// The branch name. + /// + /// The cancellation token to cancel operation. + public virtual Task GetBuildBadgeAsync( + Guid project, + string repoType, + string repoId = null, + string branchName = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("21b3b9ce-fad5-4567-9ad0-80679794e003"); + object routeValues = new { project = project, repoType = repoType }; + + List> queryParams = new List>(); + if (repoId != null) + { + queryParams.Add("repoId", repoId); + } + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a badge that indicates the status of the most recent build for the specified branch. + /// + /// Project ID or project name + /// The repository type. + /// The repository ID. + /// The branch name. + /// + /// The cancellation token to cancel operation. 
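+        // Usage sketch (illustrative only; assumes an async caller and a constructed client named
+        // "client"): fetching badge data for a branch with GetBuildBadgeDataAsync, declared below. The
+        // repository type "GitHub", the repositoryId variable, and the branch name are placeholders.
+        //
+        //   var badge = await client.GetBuildBadgeDataAsync(project, "GitHub", repoId: repositoryId, branchName: "main");
+        //   Console.WriteLine(badge);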
+ public virtual Task GetBuildBadgeDataAsync( + string project, + string repoType, + string repoId = null, + string branchName = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("21b3b9ce-fad5-4567-9ad0-80679794e003"); + object routeValues = new { project = project, repoType = repoType }; + + List> queryParams = new List>(); + if (repoId != null) + { + queryParams.Add("repoId", repoId); + } + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a badge that indicates the status of the most recent build for the specified branch. + /// + /// Project ID + /// The repository type. + /// The repository ID. + /// The branch name. + /// + /// The cancellation token to cancel operation. + public virtual Task GetBuildBadgeDataAsync( + Guid project, + string repoType, + string repoId = null, + string branchName = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("21b3b9ce-fad5-4567-9ad0-80679794e003"); + object routeValues = new { project = project, repoType = repoType }; + + List> queryParams = new List>(); + if (repoId != null) + { + queryParams.Add("repoId", repoId); + } + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Deletes a build. + /// + /// Project ID or project name + /// The ID of the build. + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteBuildAsync( + string project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project, buildId = buildId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Deletes a build. + /// + /// Project ID + /// The ID of the build. + /// + /// The cancellation token to cancel operation. 
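The angle-bracketed type arguments do not survive in this copy of the patch (for example "List>" and the bare "SendAsync(" calls above). As a reference point, here is a sketch of how one of these generated methods presumably reads with its generics restored, using GetBuildBadgeDataAsync as the example; the concrete type arguments (Task<string>, List<KeyValuePair<string, string>>, SendAsync<string>) are assumptions based on the upstream Azure DevOps client, and the two-argument Add is an SDK extension method over the key/value list.

public virtual Task<string> GetBuildBadgeDataAsync(
    string project,
    string repoType,
    string repoId = null,
    string branchName = null,
    object userState = null,
    CancellationToken cancellationToken = default)
{
    HttpMethod httpMethod = new HttpMethod("GET");
    Guid locationId = new Guid("21b3b9ce-fad5-4567-9ad0-80679794e003");
    object routeValues = new { project = project, repoType = repoType };

    List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
    if (repoId != null)
    {
        queryParams.Add("repoId", repoId);
    }
    if (branchName != null)
    {
        queryParams.Add("branchName", branchName);
    }

    return SendAsync<string>(
        httpMethod,
        locationId,
        routeValues: routeValues,
        version: new ApiResourceVersion(5.2, 2),
        queryParameters: queryParams,
        userState: userState,
        cancellationToken: cancellationToken);
}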
+ public virtual async Task DeleteBuildAsync( + Guid project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project, buildId = buildId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Gets a build + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetBuildAsync( + string project, + int buildId, + string propertyFilters = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + if (propertyFilters != null) + { + queryParams.Add("propertyFilters", propertyFilters); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a build + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetBuildAsync( + Guid project, + int buildId, + string propertyFilters = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + if (propertyFilters != null) + { + queryParams.Add("propertyFilters", propertyFilters); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of builds. + /// + /// Project ID or project name + /// A comma-delimited list of definition IDs. If specified, filters to builds for these definitions. + /// A comma-delimited list of queue IDs. If specified, filters to builds that ran against these queues. + /// If specified, filters to builds that match this build number. Append * to do a prefix search. + /// If specified, filters to builds that finished/started/queued after this date based on the queryOrder specified. + /// If specified, filters to builds that finished/started/queued before this date based on the queryOrder specified. + /// If specified, filters to builds requested for the specified user. + /// If specified, filters to builds that match this reason. + /// If specified, filters to builds that match this status. + /// If specified, filters to builds that match this result. + /// A comma-delimited list of tags. If specified, filters to builds that have the specified tags. + /// A comma-delimited list of properties to retrieve. + /// The maximum number of builds to return. 
+ /// A continuation token, returned by a previous call to this method, that can be used to return the next set of builds. + /// The maximum number of builds to return per definition. + /// Indicates whether to exclude, include, or only return deleted builds. + /// The order in which builds should be returned. + /// If specified, filters to builds that built branches that built this branch. + /// A comma-delimited list that specifies the IDs of builds to retrieve. + /// If specified, filters to builds that built from this repository. + /// If specified, filters to builds that built from repositories of this type. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildsAsync( + string project, + IEnumerable definitions = null, + IEnumerable queues = null, + string buildNumber = null, + DateTime? minTime = null, + DateTime? maxTime = null, + string requestedFor = null, + BuildReason? reasonFilter = null, + BuildStatus? statusFilter = null, + BuildResult? resultFilter = null, + IEnumerable tagFilters = null, + IEnumerable properties = null, + int? top = null, + string continuationToken = null, + int? maxBuildsPerDefinition = null, + QueryDeletedOption? deletedFilter = null, + BuildQueryOrder? queryOrder = null, + string branchName = null, + IEnumerable buildIds = null, + string repositoryId = null, + string repositoryType = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (definitions != null && definitions.Any()) + { + queryParams.Add("definitions", string.Join(",", definitions)); + } + if (queues != null && queues.Any()) + { + queryParams.Add("queues", string.Join(",", queues)); + } + if (buildNumber != null) + { + queryParams.Add("buildNumber", buildNumber); + } + if (minTime != null) + { + AddDateTimeToQueryParams(queryParams, "minTime", minTime.Value); + } + if (maxTime != null) + { + AddDateTimeToQueryParams(queryParams, "maxTime", maxTime.Value); + } + if (requestedFor != null) + { + queryParams.Add("requestedFor", requestedFor); + } + if (reasonFilter != null) + { + queryParams.Add("reasonFilter", reasonFilter.Value.ToString()); + } + if (statusFilter != null) + { + queryParams.Add("statusFilter", statusFilter.Value.ToString()); + } + if (resultFilter != null) + { + queryParams.Add("resultFilter", resultFilter.Value.ToString()); + } + if (tagFilters != null && tagFilters.Any()) + { + queryParams.Add("tagFilters", string.Join(",", tagFilters)); + } + if (properties != null && properties.Any()) + { + queryParams.Add("properties", string.Join(",", properties)); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (maxBuildsPerDefinition != null) + { + queryParams.Add("maxBuildsPerDefinition", maxBuildsPerDefinition.Value.ToString(CultureInfo.InvariantCulture)); + } + if (deletedFilter != null) + { + queryParams.Add("deletedFilter", deletedFilter.Value.ToString()); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + if (buildIds != null && buildIds.Any()) + { + queryParams.Add("buildIds", string.Join(",", buildIds)); + } + if 
(repositoryId != null) + { + queryParams.Add("repositoryId", repositoryId); + } + if (repositoryType != null) + { + queryParams.Add("repositoryType", repositoryType); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of builds. + /// + /// Project ID + /// A comma-delimited list of definition IDs. If specified, filters to builds for these definitions. + /// A comma-delimited list of queue IDs. If specified, filters to builds that ran against these queues. + /// If specified, filters to builds that match this build number. Append * to do a prefix search. + /// If specified, filters to builds that finished/started/queued after this date based on the queryOrder specified. + /// If specified, filters to builds that finished/started/queued before this date based on the queryOrder specified. + /// If specified, filters to builds requested for the specified user. + /// If specified, filters to builds that match this reason. + /// If specified, filters to builds that match this status. + /// If specified, filters to builds that match this result. + /// A comma-delimited list of tags. If specified, filters to builds that have the specified tags. + /// A comma-delimited list of properties to retrieve. + /// The maximum number of builds to return. + /// A continuation token, returned by a previous call to this method, that can be used to return the next set of builds. + /// The maximum number of builds to return per definition. + /// Indicates whether to exclude, include, or only return deleted builds. + /// The order in which builds should be returned. + /// If specified, filters to builds that built branches that built this branch. + /// A comma-delimited list that specifies the IDs of builds to retrieve. + /// If specified, filters to builds that built from this repository. + /// If specified, filters to builds that built from repositories of this type. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildsAsync( + Guid project, + IEnumerable definitions = null, + IEnumerable queues = null, + string buildNumber = null, + DateTime? minTime = null, + DateTime? maxTime = null, + string requestedFor = null, + BuildReason? reasonFilter = null, + BuildStatus? statusFilter = null, + BuildResult? resultFilter = null, + IEnumerable tagFilters = null, + IEnumerable properties = null, + int? top = null, + string continuationToken = null, + int? maxBuildsPerDefinition = null, + QueryDeletedOption? deletedFilter = null, + BuildQueryOrder? 
queryOrder = null, + string branchName = null, + IEnumerable buildIds = null, + string repositoryId = null, + string repositoryType = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (definitions != null && definitions.Any()) + { + queryParams.Add("definitions", string.Join(",", definitions)); + } + if (queues != null && queues.Any()) + { + queryParams.Add("queues", string.Join(",", queues)); + } + if (buildNumber != null) + { + queryParams.Add("buildNumber", buildNumber); + } + if (minTime != null) + { + AddDateTimeToQueryParams(queryParams, "minTime", minTime.Value); + } + if (maxTime != null) + { + AddDateTimeToQueryParams(queryParams, "maxTime", maxTime.Value); + } + if (requestedFor != null) + { + queryParams.Add("requestedFor", requestedFor); + } + if (reasonFilter != null) + { + queryParams.Add("reasonFilter", reasonFilter.Value.ToString()); + } + if (statusFilter != null) + { + queryParams.Add("statusFilter", statusFilter.Value.ToString()); + } + if (resultFilter != null) + { + queryParams.Add("resultFilter", resultFilter.Value.ToString()); + } + if (tagFilters != null && tagFilters.Any()) + { + queryParams.Add("tagFilters", string.Join(",", tagFilters)); + } + if (properties != null && properties.Any()) + { + queryParams.Add("properties", string.Join(",", properties)); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (maxBuildsPerDefinition != null) + { + queryParams.Add("maxBuildsPerDefinition", maxBuildsPerDefinition.Value.ToString(CultureInfo.InvariantCulture)); + } + if (deletedFilter != null) + { + queryParams.Add("deletedFilter", deletedFilter.Value.ToString()); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + if (buildIds != null && buildIds.Any()) + { + queryParams.Add("buildIds", string.Join(",", buildIds)); + } + if (repositoryId != null) + { + queryParams.Add("repositoryId", repositoryId); + } + if (repositoryType != null) + { + queryParams.Add("repositoryType", repositoryType); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Queues a build + /// + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task QueueBuildAsync( + Build build, + string project, + bool? ignoreWarnings = null, + string checkInTicket = null, + int? 
sourceBuildId = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(build, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (ignoreWarnings != null) + { + queryParams.Add("ignoreWarnings", ignoreWarnings.Value.ToString()); + } + if (checkInTicket != null) + { + queryParams.Add("checkInTicket", checkInTicket); + } + if (sourceBuildId != null) + { + queryParams.Add("sourceBuildId", sourceBuildId.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Queues a build + /// + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task QueueBuildAsync( + Build build, + Guid project, + bool? ignoreWarnings = null, + string checkInTicket = null, + int? sourceBuildId = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(build, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (ignoreWarnings != null) + { + queryParams.Add("ignoreWarnings", ignoreWarnings.Value.ToString()); + } + if (checkInTicket != null) + { + queryParams.Add("checkInTicket", checkInTicket); + } + if (sourceBuildId != null) + { + queryParams.Add("sourceBuildId", sourceBuildId.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Updates a build. + /// + /// The build. + /// Project ID or project name + /// The ID of the build. + /// + /// + /// The cancellation token to cancel operation. + private protected virtual Task UpdateBuildAsync( + Build build, + string project, + int buildId, + bool? retry = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project, buildId = buildId }; + HttpContent content = new ObjectContent(build, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (retry != null) + { + queryParams.Add("retry", retry.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Updates a build. + /// + /// The build. + /// Project ID + /// The ID of the build. + /// + /// + /// The cancellation token to cancel operation. + private protected virtual Task UpdateBuildAsync( + Build build, + Guid project, + int buildId, + bool? 
retry = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project, buildId = buildId }; + HttpContent content = new ObjectContent(build, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (retry != null) + { + queryParams.Add("retry", retry.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Updates multiple builds. + /// + /// The builds to update. + /// Project ID or project name + /// + /// The cancellation token to cancel operation. + public virtual Task> UpdateBuildsAsync( + IEnumerable builds, + string project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent>(builds, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Updates multiple builds. + /// + /// The builds to update. + /// Project ID + /// + /// The cancellation token to cancel operation. + public virtual Task> UpdateBuildsAsync( + IEnumerable builds, + Guid project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("0cd358e1-9217-4d94-8269-1c1ee6f93dcf"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent>(builds, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 5), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Gets the changes associated with a build + /// + /// Project ID or project name + /// + /// + /// The maximum number of changes to return + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildChangesAsync( + string project, + int buildId, + string continuationToken = null, + int? top = null, + bool? 
includeSourceChange = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("54572c7b-bbd3-45d4-80dc-28be08941620"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (includeSourceChange != null) + { + queryParams.Add("includeSourceChange", includeSourceChange.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the changes associated with a build + /// + /// Project ID + /// + /// + /// The maximum number of changes to return + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildChangesAsync( + Guid project, + int buildId, + string continuationToken = null, + int? top = null, + bool? includeSourceChange = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("54572c7b-bbd3-45d4-80dc-28be08941620"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (includeSourceChange != null) + { + queryParams.Add("includeSourceChange", includeSourceChange.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the changes made to the repository between two given builds. + /// + /// Project ID or project name + /// The ID of the first build. + /// The ID of the last build. + /// The maximum number of changes to return. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetChangesBetweenBuildsAsync( + string project, + int? fromBuildId = null, + int? toBuildId = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("f10f0ea5-18a1-43ec-a8fb-2042c7be9b43"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (fromBuildId != null) + { + queryParams.Add("fromBuildId", fromBuildId.Value.ToString(CultureInfo.InvariantCulture)); + } + if (toBuildId != null) + { + queryParams.Add("toBuildId", toBuildId.Value.ToString(CultureInfo.InvariantCulture)); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the changes made to the repository between two given builds. 
+ /// + /// Project ID + /// The ID of the first build. + /// The ID of the last build. + /// The maximum number of changes to return. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetChangesBetweenBuildsAsync( + Guid project, + int? fromBuildId = null, + int? toBuildId = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("f10f0ea5-18a1-43ec-a8fb-2042c7be9b43"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (fromBuildId != null) + { + queryParams.Add("fromBuildId", fromBuildId.Value.ToString(CultureInfo.InvariantCulture)); + } + if (toBuildId != null) + { + queryParams.Add("toBuildId", toBuildId.Value.ToString(CultureInfo.InvariantCulture)); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Creates a new definition. + /// + /// The definition. + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task CreateDefinitionAsync( + BuildDefinition definition, + string project, + int? definitionToCloneId = null, + int? definitionToCloneRevision = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(definition, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (definitionToCloneId != null) + { + queryParams.Add("definitionToCloneId", definitionToCloneId.Value.ToString(CultureInfo.InvariantCulture)); + } + if (definitionToCloneRevision != null) + { + queryParams.Add("definitionToCloneRevision", definitionToCloneRevision.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 7), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Creates a new definition. + /// + /// The definition. + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task CreateDefinitionAsync( + BuildDefinition definition, + Guid project, + int? definitionToCloneId = null, + int? 
definitionToCloneRevision = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(definition, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (definitionToCloneId != null) + { + queryParams.Add("definitionToCloneId", definitionToCloneId.Value.ToString(CultureInfo.InvariantCulture)); + } + if (definitionToCloneRevision != null) + { + queryParams.Add("definitionToCloneRevision", definitionToCloneRevision.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 7), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Deletes a definition and all associated builds. + /// + /// Project ID or project name + /// The ID of the definition. + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteDefinitionAsync( + string project, + int definitionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project, definitionId = definitionId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 7), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Deletes a definition and all associated builds. + /// + /// Project ID + /// The ID of the definition. + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteDefinitionAsync( + Guid project, + int definitionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project, definitionId = definitionId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 7), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Gets a definition, optionally at a specific revision. + /// + /// Project ID or project name + /// The ID of the definition. + /// The revision number to retrieve. If this is not specified, the latest version will be returned. + /// If specified, indicates the date from which metrics should be included. + /// A comma-delimited list of properties to include in the results. + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetDefinitionAsync( + string project, + int definitionId, + int? revision = null, + DateTime? minMetricsTime = null, + IEnumerable propertyFilters = null, + bool? 
includeLatestBuilds = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + if (revision != null) + { + queryParams.Add("revision", revision.Value.ToString(CultureInfo.InvariantCulture)); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + if (includeLatestBuilds != null) + { + queryParams.Add("includeLatestBuilds", includeLatestBuilds.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 7), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a definition, optionally at a specific revision. + /// + /// Project ID + /// The ID of the definition. + /// The revision number to retrieve. If this is not specified, the latest version will be returned. + /// If specified, indicates the date from which metrics should be included. + /// A comma-delimited list of properties to include in the results. + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetDefinitionAsync( + Guid project, + int definitionId, + int? revision = null, + DateTime? minMetricsTime = null, + IEnumerable propertyFilters = null, + bool? includeLatestBuilds = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + if (revision != null) + { + queryParams.Add("revision", revision.Value.ToString(CultureInfo.InvariantCulture)); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + if (includeLatestBuilds != null) + { + queryParams.Add("includeLatestBuilds", includeLatestBuilds.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 7), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of definitions. + /// + /// Project ID or project name + /// If specified, filters to definitions whose names match this pattern. + /// A repository ID. If specified, filters to definitions that use this repository. + /// If specified, filters to definitions that have a repository of this type. + /// Indicates the order in which definitions should be returned. + /// The maximum number of definitions to return. + /// A continuation token, returned by a previous call to this method, that can be used to return the next set of definitions. + /// If specified, indicates the date from which metrics should be included. + /// A comma-delimited list that specifies the IDs of definitions to retrieve. 
+ /// If specified, filters to definitions under this folder. + /// If specified, filters to definitions that have builds after this date. + /// If specified, filters to definitions that do not have builds after this date. + /// Indicates whether the full definitions should be returned. By default, shallow representations of the definitions are returned. + /// Indicates whether to return the latest and latest completed builds for this definition. + /// If specified, filters to definitions that use the specified task. + /// If specified, filters to definitions with the given process type. + /// If specified, filters to YAML definitions that match the given filename. + /// + /// The cancellation token to cancel operation. + protected virtual Task> GetDefinitionsAsync( + string project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTime = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeAllProperties = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (name != null) + { + queryParams.Add("name", name); + } + if (repositoryId != null) + { + queryParams.Add("repositoryId", repositoryId); + } + if (repositoryType != null) + { + queryParams.Add("repositoryType", repositoryType); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (definitionIds != null && definitionIds.Any()) + { + queryParams.Add("definitionIds", string.Join(",", definitionIds)); + } + if (path != null) + { + queryParams.Add("path", path); + } + if (builtAfter != null) + { + AddDateTimeToQueryParams(queryParams, "builtAfter", builtAfter.Value); + } + if (notBuiltAfter != null) + { + AddDateTimeToQueryParams(queryParams, "notBuiltAfter", notBuiltAfter.Value); + } + if (includeAllProperties != null) + { + queryParams.Add("includeAllProperties", includeAllProperties.Value.ToString()); + } + if (includeLatestBuilds != null) + { + queryParams.Add("includeLatestBuilds", includeLatestBuilds.Value.ToString()); + } + if (taskIdFilter != null) + { + queryParams.Add("taskIdFilter", taskIdFilter.Value.ToString()); + } + if (processType != null) + { + queryParams.Add("processType", processType.Value.ToString(CultureInfo.InvariantCulture)); + } + if (yamlFilename != null) + { + queryParams.Add("yamlFilename", yamlFilename); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 7), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of definitions. 
+ /// + /// Project ID + /// If specified, filters to definitions whose names match this pattern. + /// A repository ID. If specified, filters to definitions that use this repository. + /// If specified, filters to definitions that have a repository of this type. + /// Indicates the order in which definitions should be returned. + /// The maximum number of definitions to return. + /// A continuation token, returned by a previous call to this method, that can be used to return the next set of definitions. + /// If specified, indicates the date from which metrics should be included. + /// A comma-delimited list that specifies the IDs of definitions to retrieve. + /// If specified, filters to definitions under this folder. + /// If specified, filters to definitions that have builds after this date. + /// If specified, filters to definitions that do not have builds after this date. + /// Indicates whether the full definitions should be returned. By default, shallow representations of the definitions are returned. + /// Indicates whether to return the latest and latest completed builds for this definition. + /// If specified, filters to definitions that use the specified task. + /// If specified, filters to definitions with the given process type. + /// If specified, filters to YAML definitions that match the given filename. + /// + /// The cancellation token to cancel operation. + protected virtual Task> GetDefinitionsAsync( + Guid project, + string name = null, + string repositoryId = null, + string repositoryType = null, + DefinitionQueryOrder? queryOrder = null, + int? top = null, + string continuationToken = null, + DateTime? minMetricsTime = null, + IEnumerable definitionIds = null, + string path = null, + DateTime? builtAfter = null, + DateTime? notBuiltAfter = null, + bool? includeAllProperties = null, + bool? includeLatestBuilds = null, + Guid? taskIdFilter = null, + int? 
processType = null, + string yamlFilename = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (name != null) + { + queryParams.Add("name", name); + } + if (repositoryId != null) + { + queryParams.Add("repositoryId", repositoryId); + } + if (repositoryType != null) + { + queryParams.Add("repositoryType", repositoryType); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + if (definitionIds != null && definitionIds.Any()) + { + queryParams.Add("definitionIds", string.Join(",", definitionIds)); + } + if (path != null) + { + queryParams.Add("path", path); + } + if (builtAfter != null) + { + AddDateTimeToQueryParams(queryParams, "builtAfter", builtAfter.Value); + } + if (notBuiltAfter != null) + { + AddDateTimeToQueryParams(queryParams, "notBuiltAfter", notBuiltAfter.Value); + } + if (includeAllProperties != null) + { + queryParams.Add("includeAllProperties", includeAllProperties.Value.ToString()); + } + if (includeLatestBuilds != null) + { + queryParams.Add("includeLatestBuilds", includeLatestBuilds.Value.ToString()); + } + if (taskIdFilter != null) + { + queryParams.Add("taskIdFilter", taskIdFilter.Value.ToString()); + } + if (processType != null) + { + queryParams.Add("processType", processType.Value.ToString(CultureInfo.InvariantCulture)); + } + if (yamlFilename != null) + { + queryParams.Add("yamlFilename", yamlFilename); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 7), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Restores a deleted definition + /// + /// Project ID or project name + /// The identifier of the definition to restore. + /// When false, restores a deleted definition. + /// + /// The cancellation token to cancel operation. + public virtual Task RestoreDefinitionAsync( + string project, + int definitionId, + bool deleted, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + queryParams.Add("deleted", deleted.ToString()); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 7), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Restores a deleted definition + /// + /// Project ID + /// The identifier of the definition to restore. + /// When false, restores a deleted definition. + /// + /// The cancellation token to cancel operation. 
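A minimal sketch tying together the build methods defined earlier in this client: queue a build and read it back. It assumes the enclosing type is exposed as BuildHttpClient, that buildRequest is a prepared Build referencing an existing definition, and that the stripped return types are Task<Build>; IDs and names are placeholders.

// Illustrative fragment; relies on System and System.Threading.Tasks plus the
// build client/model types defined in this patch.
static async Task<int> QueueAndFetchAsync(BuildHttpClient client, Build buildRequest, string project)
{
    // QueueBuildAsync and GetBuildAsync presumably return Task<Build>; the
    // type arguments were dropped from this copy of the patch.
    var queued = await client.QueueBuildAsync(buildRequest, project);
    var fetched = await client.GetBuildAsync(project, queued.Id);
    Console.WriteLine($"Build {fetched.Id} is {fetched.Status}");
    return queued.Id;
}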
+ public virtual Task RestoreDefinitionAsync( + Guid project, + int definitionId, + bool deleted, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + queryParams.Add("deleted", deleted.ToString()); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 7), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Updates an existing definition. + /// + /// The new version of the defintion. + /// Project ID or project name + /// The ID of the definition. + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateDefinitionAsync( + BuildDefinition definition, + string project, + int definitionId, + int? secretsSourceDefinitionId = null, + int? secretsSourceDefinitionRevision = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6"); + object routeValues = new { project = project, definitionId = definitionId }; + HttpContent content = new ObjectContent(definition, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (secretsSourceDefinitionId != null) + { + queryParams.Add("secretsSourceDefinitionId", secretsSourceDefinitionId.Value.ToString(CultureInfo.InvariantCulture)); + } + if (secretsSourceDefinitionRevision != null) + { + queryParams.Add("secretsSourceDefinitionRevision", secretsSourceDefinitionRevision.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 7), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Updates an existing definition. + /// + /// The new version of the defintion. + /// Project ID + /// The ID of the definition. + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateDefinitionAsync( + BuildDefinition definition, + Guid project, + int definitionId, + int? secretsSourceDefinitionId = null, + int? 
secretsSourceDefinitionRevision = null,
+            object userState = null,
+            CancellationToken cancellationToken = default)
+        {
+            HttpMethod httpMethod = new HttpMethod("PUT");
+            Guid locationId = new Guid("dbeaf647-6167-421a-bda9-c9327b25e2e6");
+            object routeValues = new { project = project, definitionId = definitionId };
+            HttpContent content = new ObjectContent<BuildDefinition>(definition, new VssJsonMediaTypeFormatter(true));
+
+            List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
+            if (secretsSourceDefinitionId != null)
+            {
+                queryParams.Add("secretsSourceDefinitionId", secretsSourceDefinitionId.Value.ToString(CultureInfo.InvariantCulture));
+            }
+            if (secretsSourceDefinitionRevision != null)
+            {
+                queryParams.Add("secretsSourceDefinitionRevision", secretsSourceDefinitionRevision.Value.ToString(CultureInfo.InvariantCulture));
+            }
+
+            return SendAsync<BuildDefinition>(
+                httpMethod,
+                locationId,
+                routeValues: routeValues,
+                version: new ApiResourceVersion(5.2, 7),
+                queryParameters: queryParams,
+                userState: userState,
+                cancellationToken: cancellationToken,
+                content: content);
+        }
+
+        /// <summary>
+        /// [Preview API] Gets the contents of a file in the given source code repository.
+        /// </summary>
+        /// <param name="project">Project ID or project name</param>
+        /// <param name="providerName">The name of the source provider.</param>
+        /// <param name="serviceEndpointId">If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit.</param>
+        /// <param name="repository">If specified, the vendor-specific identifier or the name of the repository to get branches. Can only be omitted for providers that do not support multiple repositories.</param>
+        /// <param name="commitOrBranch">The identifier of the commit or branch from which a file's contents are retrieved.</param>
+        /// <param name="path">The path to the file to retrieve, relative to the root of the repository.</param>
+        /// <param name="userState"></param>
+        /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
+        public virtual async Task<Stream> GetFileContentsAsync(
+            string project,
+            string providerName,
+            Guid?
serviceEndpointId = null, + string repository = null, + string commitOrBranch = null, + string path = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("29d12225-b1d9-425f-b668-6c594a981313"); + object routeValues = new { project = project, providerName = providerName }; + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + if (commitOrBranch != null) + { + queryParams.Add("commitOrBranch", commitOrBranch); + } + if (path != null) + { + queryParams.Add("path", path); + } + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.1"), + queryParameters: queryParams, + mediaType: "text/plain", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets the contents of a file in the given source code repository. + /// + /// Project ID + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// If specified, the vendor-specific identifier or the name of the repository to get branches. Can only be omitted for providers that do not support multiple repositories. + /// The identifier of the commit or branch from which a file's contents are retrieved. + /// The path to the file to retrieve, relative to the root of the repository. + /// + /// The cancellation token to cancel operation. + public virtual async Task GetFileContentsAsync( + Guid project, + string providerName, + Guid? 
serviceEndpointId = null, + string repository = null, + string commitOrBranch = null, + string path = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("29d12225-b1d9-425f-b668-6c594a981313"); + object routeValues = new { project = project, providerName = providerName }; + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + if (commitOrBranch != null) + { + queryParams.Add("commitOrBranch", commitOrBranch); + } + if (path != null) + { + queryParams.Add("path", path); + } + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.1"), + queryParameters: queryParams, + mediaType: "text/plain", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Creates a new folder. + /// + /// The folder. + /// Project ID or project name + /// The full path of the folder. + /// + /// The cancellation token to cancel operation. + public virtual Task CreateFolderAsync( + Folder folder, + string project, + string path, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("a906531b-d2da-4f55-bda7-f3e676cc50d9"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(folder, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + queryParams.Add("path", path); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Creates a new folder. + /// + /// The folder. + /// Project ID + /// The full path of the folder. + /// + /// The cancellation token to cancel operation. 
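A short sketch of consuming GetFileContentsAsync above. The method hands back a readable Stream and, as the code shows, already wraps gzip-encoded responses in a GZipStream, so callers just read it. The client type name, provider name, repository, branch, and path are placeholders.

// Illustrative fragment; relies on System, System.IO, and System.Threading.Tasks
// plus the build client defined in this patch.
static async Task<string> ReadRepoFileAsync(BuildHttpClient client, string project, Guid serviceEndpointId)
{
    using (Stream contents = await client.GetFileContentsAsync(
        project,
        providerName: "github",
        serviceEndpointId: serviceEndpointId,
        repository: "octocat/hello-world",
        commitOrBranch: "main",
        path: "azure-pipelines.yml"))
    using (var reader = new StreamReader(contents))
    {
        // The stream is already decompressed by the method above.
        return await reader.ReadToEndAsync();
    }
}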
+ public virtual Task CreateFolderAsync( + Folder folder, + Guid project, + string path, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("a906531b-d2da-4f55-bda7-f3e676cc50d9"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(folder, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + queryParams.Add("path", path); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Deletes a definition folder. Definitions and their corresponding builds will also be deleted. + /// + /// Project ID or project name + /// The full path to the folder. + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteFolderAsync( + string project, + string path, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("a906531b-d2da-4f55-bda7-f3e676cc50d9"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + queryParams.Add("path", path); + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Deletes a definition folder. Definitions and their corresponding builds will also be deleted. + /// + /// Project ID + /// The full path to the folder. + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteFolderAsync( + Guid project, + string path, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("a906531b-d2da-4f55-bda7-f3e676cc50d9"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + queryParams.Add("path", path); + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Gets a list of build definition folders. + /// + /// Project ID or project name + /// The path to start with. + /// The order in which folders should be returned. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetFoldersAsync( + string project, + string path = null, + FolderQueryOrder? 
queryOrder = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("a906531b-d2da-4f55-bda7-f3e676cc50d9"); + object routeValues = new { project = project, path = path }; + + List> queryParams = new List>(); + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of build definition folders. + /// + /// Project ID + /// The path to start with. + /// The order in which folders should be returned. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetFoldersAsync( + Guid project, + string path = null, + FolderQueryOrder? queryOrder = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("a906531b-d2da-4f55-bda7-f3e676cc50d9"); + object routeValues = new { project = project, path = path }; + + List> queryParams = new List>(); + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Updates an existing folder at given existing path + /// + /// The new version of the folder. + /// Project ID or project name + /// The full path to the folder. + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateFolderAsync( + Folder folder, + string project, + string path, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("a906531b-d2da-4f55-bda7-f3e676cc50d9"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(folder, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + queryParams.Add("path", path); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Updates an existing folder at given existing path + /// + /// The new version of the folder. + /// Project ID + /// The full path to the folder. + /// + /// The cancellation token to cancel operation. 
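A minimal folder-management sketch using the folder methods above. It assumes the enclosing client type is exposed as BuildHttpClient, that folder is a prepared Folder model, that GetFoldersAsync returns a list type with Count, and that paths are backslash-delimited as in the upstream Azure DevOps build client; the literal paths are placeholders.

// Illustrative fragment; relies on System and System.Threading.Tasks plus the
// build client/model types defined in this patch.
static async Task ManageFoldersAsync(BuildHttpClient client, string project, Folder folder)
{
    // Create a folder at a given path, then list folders from the root.
    await client.CreateFolderAsync(folder, project, path: "\\Team\\CI");
    var folders = await client.GetFoldersAsync(project, path: "\\");
    Console.WriteLine($"{folders.Count} folders under the root");

    // Per the XML docs above, deleting a folder also deletes the definitions
    // (and their builds) beneath it.
    await client.DeleteFolderAsync(project, "\\Team\\CI");
}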
+ public virtual Task UpdateFolderAsync( + Folder folder, + Guid project, + string path, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("a906531b-d2da-4f55-bda7-f3e676cc50d9"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(folder, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + queryParams.Add("path", path); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Gets the latest build for a definition, optionally scoped to a specific branch. + /// + /// Project ID or project name + /// definition name with optional leading folder path, or the definition id + /// optional parameter that indicates the specific branch to use + /// + /// The cancellation token to cancel operation. + public virtual Task GetLatestBuildAsync( + string project, + string definition, + string branchName = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("54481611-01f4-47f3-998f-160da0f0c229"); + object routeValues = new { project = project, definition = definition }; + + List> queryParams = new List>(); + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the latest build for a definition, optionally scoped to a specific branch. + /// + /// Project ID + /// definition name with optional leading folder path, or the definition id + /// optional parameter that indicates the specific branch to use + /// + /// The cancellation token to cancel operation. + public virtual Task GetLatestBuildAsync( + Guid project, + string definition, + string branchName = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("54481611-01f4-47f3-998f-160da0f0c229"); + object routeValues = new { project = project, definition = definition }; + + List> queryParams = new List>(); + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets an individual log file for a build. + /// + /// Project ID or project name + /// The ID of the build. + /// The ID of the log file. + /// The start line. + /// The end line. + /// + /// The cancellation token to cancel operation. + public virtual async Task GetBuildLogAsync( + string project, + int buildId, + int logId, + long? startLine = null, + long? 
endLine = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("35a80daf-7f30-45fc-86e8-6b813d9c90df"); + object routeValues = new { project = project, buildId = buildId, logId = logId }; + + List> queryParams = new List>(); + if (startLine != null) + { + queryParams.Add("startLine", startLine.Value.ToString(CultureInfo.InvariantCulture)); + } + if (endLine != null) + { + queryParams.Add("endLine", endLine.Value.ToString(CultureInfo.InvariantCulture)); + } + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.2"), + queryParameters: queryParams, + mediaType: "text/plain", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets an individual log file for a build. + /// + /// Project ID + /// The ID of the build. + /// The ID of the log file. + /// The start line. + /// The end line. + /// + /// The cancellation token to cancel operation. + public virtual async Task GetBuildLogAsync( + Guid project, + int buildId, + int logId, + long? startLine = null, + long? endLine = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("35a80daf-7f30-45fc-86e8-6b813d9c90df"); + object routeValues = new { project = project, buildId = buildId, logId = logId }; + + List> queryParams = new List>(); + if (startLine != null) + { + queryParams.Add("startLine", startLine.Value.ToString(CultureInfo.InvariantCulture)); + } + if (endLine != null) + { + queryParams.Add("endLine", endLine.Value.ToString(CultureInfo.InvariantCulture)); + } + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.2"), + queryParameters: queryParams, + mediaType: "text/plain", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets an individual log file for a build. + /// + /// Project ID or project name + /// The ID of the build. + /// The ID of the log file. + /// The start line. + /// The end line. 
+ /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildLogLinesAsync( + string project, + int buildId, + int logId, + long? startLine = null, + long? endLine = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("35a80daf-7f30-45fc-86e8-6b813d9c90df"); + object routeValues = new { project = project, buildId = buildId, logId = logId }; + + List> queryParams = new List>(); + if (startLine != null) + { + queryParams.Add("startLine", startLine.Value.ToString(CultureInfo.InvariantCulture)); + } + if (endLine != null) + { + queryParams.Add("endLine", endLine.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets an individual log file for a build. + /// + /// Project ID + /// The ID of the build. + /// The ID of the log file. + /// The start line. + /// The end line. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildLogLinesAsync( + Guid project, + int buildId, + int logId, + long? startLine = null, + long? endLine = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("35a80daf-7f30-45fc-86e8-6b813d9c90df"); + object routeValues = new { project = project, buildId = buildId, logId = logId }; + + List> queryParams = new List>(); + if (startLine != null) + { + queryParams.Add("startLine", startLine.Value.ToString(CultureInfo.InvariantCulture)); + } + if (endLine != null) + { + queryParams.Add("endLine", endLine.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the logs for a build. + /// + /// Project ID or project name + /// The ID of the build. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildLogsAsync( + string project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("35a80daf-7f30-45fc-86e8-6b813d9c90df"); + object routeValues = new { project = project, buildId = buildId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the logs for a build. + /// + /// Project ID + /// The ID of the build. + /// + /// The cancellation token to cancel operation. 
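// A minimal usage sketch for the build-log APIs above, assuming a configured BuildHttpClient
// named buildClient and illustrative buildId/logId values. The gzip handling in the method
// bodies is transparent to the caller, which simply receives a readable Stream; the line-based
// overload is assumed here to resolve to a list of strings:
//
//     using (Stream logStream = await buildClient.GetBuildLogAsync("MyProject", buildId, logId))
//     using (StreamReader reader = new StreamReader(logStream))
//     {
//         string fullLog = await reader.ReadToEndAsync();
//     }
//
//     List<string> lines = await buildClient.GetBuildLogLinesAsync("MyProject", buildId, logId, startLine: 0, endLine: 100);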
+ public virtual Task> GetBuildLogsAsync( + Guid project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("35a80daf-7f30-45fc-86e8-6b813d9c90df"); + object routeValues = new { project = project, buildId = buildId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the logs for a build. + /// + /// Project ID or project name + /// The ID of the build. + /// + /// The cancellation token to cancel operation. + public virtual async Task GetBuildLogsZipAsync( + string project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("35a80daf-7f30-45fc-86e8-6b813d9c90df"); + object routeValues = new { project = project, buildId = buildId }; + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.2"), + mediaType: "application/zip", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets the logs for a build. + /// + /// Project ID + /// The ID of the build. + /// + /// The cancellation token to cancel operation. + public virtual async Task GetBuildLogsZipAsync( + Guid project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("35a80daf-7f30-45fc-86e8-6b813d9c90df"); + object routeValues = new { project = project, buildId = buildId }; + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.2"), + mediaType: "application/zip", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets an individual log file for a build. + /// + /// Project ID or project name + /// The ID of the build. + /// The ID of the log file. + /// The start line. + /// The end line. 
+ /// + /// The cancellation token to cancel operation. + public virtual async Task GetBuildLogZipAsync( + string project, + int buildId, + int logId, + long? startLine = null, + long? endLine = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("35a80daf-7f30-45fc-86e8-6b813d9c90df"); + object routeValues = new { project = project, buildId = buildId, logId = logId }; + + List> queryParams = new List>(); + if (startLine != null) + { + queryParams.Add("startLine", startLine.Value.ToString(CultureInfo.InvariantCulture)); + } + if (endLine != null) + { + queryParams.Add("endLine", endLine.Value.ToString(CultureInfo.InvariantCulture)); + } + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.2"), + queryParameters: queryParams, + mediaType: "application/zip", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets an individual log file for a build. + /// + /// Project ID + /// The ID of the build. + /// The ID of the log file. + /// The start line. + /// The end line. + /// + /// The cancellation token to cancel operation. + public virtual async Task GetBuildLogZipAsync( + Guid project, + int buildId, + int logId, + long? startLine = null, + long? endLine = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("35a80daf-7f30-45fc-86e8-6b813d9c90df"); + object routeValues = new { project = project, buildId = buildId, logId = logId }; + + List> queryParams = new List>(); + if (startLine != null) + { + queryParams.Add("startLine", startLine.Value.ToString(CultureInfo.InvariantCulture)); + } + if (endLine != null) + { + queryParams.Add("endLine", endLine.Value.ToString(CultureInfo.InvariantCulture)); + } + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.2"), + queryParameters: queryParams, + mediaType: "application/zip", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets build metrics for a project. 
+ /// + /// Project ID or project name + /// The aggregation type to use (hourly, daily). + /// The date from which to calculate metrics. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetProjectMetricsAsync( + string project, + string metricAggregationType = null, + DateTime? minMetricsTime = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("7433fae7-a6bc-41dc-a6e2-eef9005ce41a"); + object routeValues = new { project = project, metricAggregationType = metricAggregationType }; + + List> queryParams = new List>(); + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets build metrics for a project. + /// + /// Project ID + /// The aggregation type to use (hourly, daily). + /// The date from which to calculate metrics. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetProjectMetricsAsync( + Guid project, + string metricAggregationType = null, + DateTime? minMetricsTime = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("7433fae7-a6bc-41dc-a6e2-eef9005ce41a"); + object routeValues = new { project = project, metricAggregationType = metricAggregationType }; + + List> queryParams = new List>(); + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets build metrics for a definition. + /// + /// Project ID or project name + /// The ID of the definition. + /// The date from which to calculate metrics. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionMetricsAsync( + string project, + int definitionId, + DateTime? minMetricsTime = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d973b939-0ce0-4fec-91d8-da3940fa1827"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets build metrics for a definition. + /// + /// Project ID + /// The ID of the definition. + /// The date from which to calculate metrics. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionMetricsAsync( + Guid project, + int definitionId, + DateTime? 
minMetricsTime = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d973b939-0ce0-4fec-91d8-da3940fa1827"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + if (minMetricsTime != null) + { + AddDateTimeToQueryParams(queryParams, "minMetricsTime", minMetricsTime.Value); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets all build definition options supported by the system. + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildOptionDefinitionsAsync( + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("591cb5a4-2d46-4f3a-a697-5cd42b6bd332"); + + return SendAsync>( + httpMethod, + locationId, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets all build definition options supported by the system. + /// + /// Project ID or project name + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildOptionDefinitionsAsync( + string project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("591cb5a4-2d46-4f3a-a697-5cd42b6bd332"); + object routeValues = new { project = project }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets all build definition options supported by the system. + /// + /// Project ID + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildOptionDefinitionsAsync( + Guid project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("591cb5a4-2d46-4f3a-a697-5cd42b6bd332"); + object routeValues = new { project = project }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the contents of a directory in the given source code repository. + /// + /// Project ID or project name + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// If specified, the vendor-specific identifier or the name of the repository to get branches. Can only be omitted for providers that do not support multiple repositories. + /// The identifier of the commit or branch from which a file's contents are retrieved. + /// The path contents to list, relative to the root of the repository. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetPathContentsAsync( + string project, + string providerName, + Guid? 
serviceEndpointId = null, + string repository = null, + string commitOrBranch = null, + string path = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("7944d6fb-df01-4709-920a-7a189aa34037"); + object routeValues = new { project = project, providerName = providerName }; + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + if (commitOrBranch != null) + { + queryParams.Add("commitOrBranch", commitOrBranch); + } + if (path != null) + { + queryParams.Add("path", path); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the contents of a directory in the given source code repository. + /// + /// Project ID + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// If specified, the vendor-specific identifier or the name of the repository to get branches. Can only be omitted for providers that do not support multiple repositories. + /// The identifier of the commit or branch from which a file's contents are retrieved. + /// The path contents to list, relative to the root of the repository. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetPathContentsAsync( + Guid project, + string providerName, + Guid? serviceEndpointId = null, + string repository = null, + string commitOrBranch = null, + string path = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("7944d6fb-df01-4709-920a-7a189aa34037"); + object routeValues = new { project = project, providerName = providerName }; + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + if (commitOrBranch != null) + { + queryParams.Add("commitOrBranch", commitOrBranch); + } + if (path != null) + { + queryParams.Add("path", path); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets properties for a build. + /// + /// Project ID or project name + /// The ID of the build. + /// A comma-delimited list of properties. If specified, filters to these specific properties. + /// + /// The cancellation token to cancel operation. 
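// A minimal usage sketch for GetPathContentsAsync above, assuming a configured BuildHttpClient
// named buildClient; the provider name, repository, endpoint variable, and the
// SourceRepositoryItem result type are illustrative assumptions, not taken from this patch:
//
//     List<SourceRepositoryItem> items = await buildClient.GetPathContentsAsync(
//         "MyProject",
//         providerName: "github",
//         serviceEndpointId: endpointId,
//         repository: "octocat/hello-world",
//         commitOrBranch: "main",
//         path: "/");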
+ public virtual Task GetBuildPropertiesAsync( + string project, + int buildId, + IEnumerable filter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("0a6312e9-0627-49b7-8083-7d74a64849c9"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + if (filter != null && filter.Any()) + { + queryParams.Add("filter", string.Join(",", filter)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets properties for a build. + /// + /// Project ID + /// The ID of the build. + /// A comma-delimited list of properties. If specified, filters to these specific properties. + /// + /// The cancellation token to cancel operation. + public virtual Task GetBuildPropertiesAsync( + Guid project, + int buildId, + IEnumerable filter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("0a6312e9-0627-49b7-8083-7d74a64849c9"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + if (filter != null && filter.Any()) + { + queryParams.Add("filter", string.Join(",", filter)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets properties for a definition. + /// + /// Project ID or project name + /// The ID of the definition. + /// A comma-delimited list of properties. If specified, filters to these specific properties. + /// + /// The cancellation token to cancel operation. + public virtual Task GetDefinitionPropertiesAsync( + string project, + int definitionId, + IEnumerable filter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d9826ad7-2a68-46a9-a6e9-677698777895"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + if (filter != null && filter.Any()) + { + queryParams.Add("filter", string.Join(",", filter)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets properties for a definition. + /// + /// Project ID + /// The ID of the definition. + /// A comma-delimited list of properties. If specified, filters to these specific properties. + /// + /// The cancellation token to cancel operation. 
+ public virtual Task GetDefinitionPropertiesAsync( + Guid project, + int definitionId, + IEnumerable filter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d9826ad7-2a68-46a9-a6e9-677698777895"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + if (filter != null && filter.Any()) + { + queryParams.Add("filter", string.Join(",", filter)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a pull request object from source provider. + /// + /// Project ID or project name + /// The name of the source provider. + /// Vendor-specific id of the pull request. + /// Vendor-specific identifier or the name of the repository that contains the pull request. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// + /// The cancellation token to cancel operation. + public virtual Task GetPullRequestAsync( + string project, + string providerName, + string pullRequestId, + string repositoryId = null, + Guid? serviceEndpointId = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d8763ec7-9ff0-4fb4-b2b2-9d757906ff14"); + object routeValues = new { project = project, providerName = providerName, pullRequestId = pullRequestId }; + + List> queryParams = new List>(); + if (repositoryId != null) + { + queryParams.Add("repositoryId", repositoryId); + } + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a pull request object from source provider. + /// + /// Project ID + /// The name of the source provider. + /// Vendor-specific id of the pull request. + /// Vendor-specific identifier or the name of the repository that contains the pull request. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// + /// The cancellation token to cancel operation. + public virtual Task GetPullRequestAsync( + Guid project, + string providerName, + string pullRequestId, + string repositoryId = null, + Guid? 
serviceEndpointId = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d8763ec7-9ff0-4fb4-b2b2-9d757906ff14"); + object routeValues = new { project = project, providerName = providerName, pullRequestId = pullRequestId }; + + List> queryParams = new List>(); + if (repositoryId != null) + { + queryParams.Add("repositoryId", repositoryId); + } + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a build report. + /// + /// Project ID or project name + /// The ID of the build. + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetBuildReportAsync( + string project, + int buildId, + string type = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("45bcaa88-67e1-4042-a035-56d3b4a7d44c"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + if (type != null) + { + queryParams.Add("type", type); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a build report. + /// + /// Project ID + /// The ID of the build. + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetBuildReportAsync( + Guid project, + int buildId, + string type = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("45bcaa88-67e1-4042-a035-56d3b4a7d44c"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + if (type != null) + { + queryParams.Add("type", type); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a build report. + /// + /// Project ID or project name + /// The ID of the build. + /// + /// + /// The cancellation token to cancel operation. 
+ public virtual async Task GetBuildReportHtmlContentAsync( + string project, + int buildId, + string type = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("45bcaa88-67e1-4042-a035-56d3b4a7d44c"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + if (type != null) + { + queryParams.Add("type", type); + } + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.2"), + queryParameters: queryParams, + mediaType: "text/html", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets a build report. + /// + /// Project ID + /// The ID of the build. + /// + /// + /// The cancellation token to cancel operation. + public virtual async Task GetBuildReportHtmlContentAsync( + Guid project, + int buildId, + string type = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("45bcaa88-67e1-4042-a035-56d3b4a7d44c"); + object routeValues = new { project = project, buildId = buildId }; + + List> queryParams = new List>(); + if (type != null) + { + queryParams.Add("type", type); + } + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.2-preview.2"), + queryParameters: queryParams, + mediaType: "text/html", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Gets a list of source code repositories. + /// + /// Project ID or project name + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// If specified, the vendor-specific identifier or the name of a single repository to get. + /// 'top' for the repositories most relevant for the endpoint. If not set, all repositories are returned. Ignored if 'repository' is set. + /// If set to true, this will limit the set of results and will return a continuation token to continue the query. 
+ /// When paging results, this is a continuation token, returned by a previous call to this method, that can be used to return the next set of repositories. + /// + /// The cancellation token to cancel operation. + public virtual Task ListRepositoriesAsync( + string project, + string providerName, + Guid? serviceEndpointId = null, + string repository = null, + ResultSet? resultSet = null, + bool? pageResults = null, + string continuationToken = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d44d1680-f978-4834-9b93-8c6e132329c9"); + object routeValues = new { project = project, providerName = providerName }; + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + if (resultSet != null) + { + queryParams.Add("resultSet", resultSet.Value.ToString()); + } + if (pageResults != null) + { + queryParams.Add("pageResults", pageResults.Value.ToString()); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of source code repositories. + /// + /// Project ID + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// If specified, the vendor-specific identifier or the name of a single repository to get. + /// 'top' for the repositories most relevant for the endpoint. If not set, all repositories are returned. Ignored if 'repository' is set. + /// If set to true, this will limit the set of results and will return a continuation token to continue the query. + /// When paging results, this is a continuation token, returned by a previous call to this method, that can be used to return the next set of repositories. + /// + /// The cancellation token to cancel operation. + public virtual Task ListRepositoriesAsync( + Guid project, + string providerName, + Guid? serviceEndpointId = null, + string repository = null, + ResultSet? resultSet = null, + bool? 
pageResults = null, + string continuationToken = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d44d1680-f978-4834-9b93-8c6e132329c9"); + object routeValues = new { project = project, providerName = providerName }; + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + if (resultSet != null) + { + queryParams.Add("resultSet", resultSet.Value.ToString()); + } + if (pageResults != null) + { + queryParams.Add("pageResults", pageResults.Value.ToString()); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// Project ID or project name + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> AuthorizeDefinitionResourcesAsync( + IEnumerable resources, + string project, + int definitionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("ea623316-1967-45eb-89ab-e9e6110cf2d6"); + object routeValues = new { project = project, definitionId = definitionId }; + HttpContent content = new ObjectContent>(resources, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// Project ID + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> AuthorizeDefinitionResourcesAsync( + IEnumerable resources, + Guid project, + int definitionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("ea623316-1967-45eb-89ab-e9e6110cf2d6"); + object routeValues = new { project = project, definitionId = definitionId }; + HttpContent content = new ObjectContent>(resources, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionResourcesAsync( + string project, + int definitionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("ea623316-1967-45eb-89ab-e9e6110cf2d6"); + object routeValues = new { project = project, definitionId = definitionId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// The cancellation token to cancel operation. 
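// A minimal paging sketch for ListRepositoriesAsync above, assuming a configured BuildHttpClient
// named buildClient; the result shape (Repositories/ContinuationToken on a SourceRepositories
// object) is an assumption based on the continuationToken parameter documented above:
//
//     string continuationToken = null;
//     do
//     {
//         SourceRepositories page = await buildClient.ListRepositoriesAsync(
//             "MyProject", "github",
//             serviceEndpointId: endpointId,
//             pageResults: true,
//             continuationToken: continuationToken);
//         // consume page.Repositories ...
//         continuationToken = page.ContinuationToken;
//     }
//     while (!string.IsNullOrEmpty(continuationToken));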
+ public virtual Task> GetDefinitionResourcesAsync( + Guid project, + int definitionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("ea623316-1967-45eb-89ab-e9e6110cf2d6"); + object routeValues = new { project = project, definitionId = definitionId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets information about build resources in the system. + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetResourceUsageAsync( + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("3813d06c-9e36-4ea1-aac3-61a485d60e3d"); + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets all revisions of a definition. + /// + /// Project ID or project name + /// The ID of the definition. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionRevisionsAsync( + string project, + int definitionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("7c116775-52e5-453e-8c5d-914d9762d8c4"); + object routeValues = new { project = project, definitionId = definitionId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 3), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets all revisions of a definition. + /// + /// Project ID + /// The ID of the definition. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionRevisionsAsync( + Guid project, + int definitionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("7c116775-52e5-453e-8c5d-914d9762d8c4"); + object routeValues = new { project = project, definitionId = definitionId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 3), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the build settings. + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetBuildSettingsAsync( + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("aa8c1c9c-ef8b-474a-b8c4-785c7b191d0d"); + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the build settings. + /// + /// Project ID or project name + /// + /// The cancellation token to cancel operation. 
+ public virtual Task GetBuildSettingsAsync( + string project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("aa8c1c9c-ef8b-474a-b8c4-785c7b191d0d"); + object routeValues = new { project = project }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the build settings. + /// + /// Project ID + /// + /// The cancellation token to cancel operation. + public virtual Task GetBuildSettingsAsync( + Guid project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("aa8c1c9c-ef8b-474a-b8c4-785c7b191d0d"); + object routeValues = new { project = project }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Updates the build settings. + /// + /// The new settings. + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateBuildSettingsAsync( + BuildSettings settings, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("aa8c1c9c-ef8b-474a-b8c4-785c7b191d0d"); + HttpContent content = new ObjectContent(settings, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Updates the build settings. + /// + /// The new settings. + /// Project ID or project name + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateBuildSettingsAsync( + BuildSettings settings, + string project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("aa8c1c9c-ef8b-474a-b8c4-785c7b191d0d"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(settings, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Updates the build settings. + /// + /// The new settings. + /// Project ID + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateBuildSettingsAsync( + BuildSettings settings, + Guid project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("aa8c1c9c-ef8b-474a-b8c4-785c7b191d0d"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(settings, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Get a list of source providers and their capabilities. 
+ /// + /// Project ID or project name + /// + /// The cancellation token to cancel operation. + public virtual Task> ListSourceProvidersAsync( + string project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("3ce81729-954f-423d-a581-9fea01d25186"); + object routeValues = new { project = project }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of source providers and their capabilities. + /// + /// Project ID + /// + /// The cancellation token to cancel operation. + public virtual Task> ListSourceProvidersAsync( + Guid project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("3ce81729-954f-423d-a581-9fea01d25186"); + object routeValues = new { project = project }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API]

Gets the build status for a definition, optionally scoped to a specific branch, stage, job, and configuration. If there are more than one, then it is required to pass in a stageName value when specifying a jobName, and the same rule then applies for both if passing a configuration parameter.
+ ///
+ /// Project ID or project name + /// Either the definition name with optional leading folder path, or the definition id. + /// Only consider the most recent build for this branch. + /// Use this stage within the pipeline to render the status. + /// Use this job within a stage of the pipeline to render the status. + /// Use this job configuration to render the status + /// Replaces the default text on the left side of the badge. + /// + /// The cancellation token to cancel operation. + public virtual Task GetStatusBadgeAsync( + string project, + string definition, + string branchName = null, + string stageName = null, + string jobName = null, + string configuration = null, + string label = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("07acfdce-4757-4439-b422-ddd13a2fcc10"); + object routeValues = new { project = project, definition = definition }; + + List> queryParams = new List>(); + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + if (stageName != null) + { + queryParams.Add("stageName", stageName); + } + if (jobName != null) + { + queryParams.Add("jobName", jobName); + } + if (configuration != null) + { + queryParams.Add("configuration", configuration); + } + if (label != null) + { + queryParams.Add("label", label); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API]

Gets the build status for a definition, optionally scoped to a specific branch, stage, job, and configuration. If there are more than one, then it is required to pass in a stageName value when specifying a jobName, and the same rule then applies for both if passing a configuration parameter.
+ ///
+ /// Project ID + /// Either the definition name with optional leading folder path, or the definition id. + /// Only consider the most recent build for this branch. + /// Use this stage within the pipeline to render the status. + /// Use this job within a stage of the pipeline to render the status. + /// Use this job configuration to render the status + /// Replaces the default text on the left side of the badge. + /// + /// The cancellation token to cancel operation. + public virtual Task GetStatusBadgeAsync( + Guid project, + string definition, + string branchName = null, + string stageName = null, + string jobName = null, + string configuration = null, + string label = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("07acfdce-4757-4439-b422-ddd13a2fcc10"); + object routeValues = new { project = project, definition = definition }; + + List> queryParams = new List>(); + if (branchName != null) + { + queryParams.Add("branchName", branchName); + } + if (stageName != null) + { + queryParams.Add("stageName", stageName); + } + if (jobName != null) + { + queryParams.Add("jobName", jobName); + } + if (configuration != null) + { + queryParams.Add("configuration", configuration); + } + if (label != null) + { + queryParams.Add("label", label); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Adds a tag to a build. + /// + /// Project ID or project name + /// The ID of the build. + /// The tag to add. + /// + /// The cancellation token to cancel operation. + public virtual Task> AddBuildTagAsync( + string project, + int buildId, + string tag, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("6e6114b2-8161-44c8-8f6c-c5505782427f"); + object routeValues = new { project = project, buildId = buildId, tag = tag }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Adds a tag to a build. + /// + /// Project ID + /// The ID of the build. + /// The tag to add. + /// + /// The cancellation token to cancel operation. + public virtual Task> AddBuildTagAsync( + Guid project, + int buildId, + string tag, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("6e6114b2-8161-44c8-8f6c-c5505782427f"); + object routeValues = new { project = project, buildId = buildId, tag = tag }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Adds tags to a build. + /// + /// The tags to add. + /// Project ID or project name + /// The ID of the build. + /// + /// The cancellation token to cancel operation. 
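// A minimal usage sketch for GetStatusBadgeAsync above, assuming a configured BuildHttpClient
// named buildClient and assuming the call resolves to the rendered badge markup as a string;
// the project, definition, and stage names are illustrative:
//
//     string badge = await buildClient.GetStatusBadgeAsync(
//         "MyProject", "My-CI-Definition",
//         branchName: "main",
//         stageName: "Build",
//         label: "CI");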
+ public virtual Task> AddBuildTagsAsync( + IEnumerable tags, + string project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("6e6114b2-8161-44c8-8f6c-c5505782427f"); + object routeValues = new { project = project, buildId = buildId }; + HttpContent content = new ObjectContent>(tags, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Adds tags to a build. + /// + /// The tags to add. + /// Project ID + /// The ID of the build. + /// + /// The cancellation token to cancel operation. + public virtual Task> AddBuildTagsAsync( + IEnumerable tags, + Guid project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("6e6114b2-8161-44c8-8f6c-c5505782427f"); + object routeValues = new { project = project, buildId = buildId }; + HttpContent content = new ObjectContent>(tags, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Removes a tag from a build. + /// + /// Project ID or project name + /// The ID of the build. + /// The tag to remove. + /// + /// The cancellation token to cancel operation. + public virtual Task> DeleteBuildTagAsync( + string project, + int buildId, + string tag, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("6e6114b2-8161-44c8-8f6c-c5505782427f"); + object routeValues = new { project = project, buildId = buildId, tag = tag }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Removes a tag from a build. + /// + /// Project ID + /// The ID of the build. + /// The tag to remove. + /// + /// The cancellation token to cancel operation. + public virtual Task> DeleteBuildTagAsync( + Guid project, + int buildId, + string tag, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("6e6114b2-8161-44c8-8f6c-c5505782427f"); + object routeValues = new { project = project, buildId = buildId, tag = tag }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the tags for a build. + /// + /// Project ID or project name + /// The ID of the build. + /// + /// The cancellation token to cancel operation. 
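+ // Illustrative sketch with the same hypothetical "client": reading a build's tags back.
+ //     var buildTags = await client.GetBuildTagsAsync("MyProject", buildId: 42);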
+ public virtual Task> GetBuildTagsAsync( + string project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6e6114b2-8161-44c8-8f6c-c5505782427f"); + object routeValues = new { project = project, buildId = buildId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the tags for a build. + /// + /// Project ID + /// The ID of the build. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetBuildTagsAsync( + Guid project, + int buildId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6e6114b2-8161-44c8-8f6c-c5505782427f"); + object routeValues = new { project = project, buildId = buildId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Adds a tag to a definition + /// + /// Project ID or project name + /// The ID of the definition. + /// The tag to add. + /// + /// The cancellation token to cancel operation. + public virtual Task> AddDefinitionTagAsync( + string project, + int definitionId, + string tag, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("cb894432-134a-4d31-a839-83beceaace4b"); + object routeValues = new { project = project, definitionId = definitionId, tag = tag }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Adds a tag to a definition + /// + /// Project ID + /// The ID of the definition. + /// The tag to add. + /// + /// The cancellation token to cancel operation. + public virtual Task> AddDefinitionTagAsync( + Guid project, + int definitionId, + string tag, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("cb894432-134a-4d31-a839-83beceaace4b"); + object routeValues = new { project = project, definitionId = definitionId, tag = tag }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Adds multiple tags to a definition. + /// + /// The tags to add. + /// Project ID or project name + /// The ID of the definition. + /// + /// The cancellation token to cancel operation. 
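+ // Illustrative sketch with the same hypothetical "client": tags can also be applied to a definition rather than a build.
+ //     var definitionTags = await client.AddDefinitionTagsAsync(new[] { "deprecated" }, "MyProject", definitionId: 7);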
+ public virtual Task> AddDefinitionTagsAsync( + IEnumerable tags, + string project, + int definitionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("cb894432-134a-4d31-a839-83beceaace4b"); + object routeValues = new { project = project, definitionId = definitionId }; + HttpContent content = new ObjectContent>(tags, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Adds multiple tags to a definition. + /// + /// The tags to add. + /// Project ID + /// The ID of the definition. + /// + /// The cancellation token to cancel operation. + public virtual Task> AddDefinitionTagsAsync( + IEnumerable tags, + Guid project, + int definitionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("cb894432-134a-4d31-a839-83beceaace4b"); + object routeValues = new { project = project, definitionId = definitionId }; + HttpContent content = new ObjectContent>(tags, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Removes a tag from a definition. + /// + /// Project ID or project name + /// The ID of the definition. + /// The tag to remove. + /// + /// The cancellation token to cancel operation. + public virtual Task> DeleteDefinitionTagAsync( + string project, + int definitionId, + string tag, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("cb894432-134a-4d31-a839-83beceaace4b"); + object routeValues = new { project = project, definitionId = definitionId, tag = tag }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Removes a tag from a definition. + /// + /// Project ID + /// The ID of the definition. + /// The tag to remove. + /// + /// The cancellation token to cancel operation. + public virtual Task> DeleteDefinitionTagAsync( + Guid project, + int definitionId, + string tag, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("cb894432-134a-4d31-a839-83beceaace4b"); + object routeValues = new { project = project, definitionId = definitionId, tag = tag }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the tags for a definition. + /// + /// Project ID or project name + /// The ID of the definition. + /// The definition revision number. If not specified, uses the latest revision of the definition. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionTagsAsync( + string project, + int definitionId, + int? 
revision = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("cb894432-134a-4d31-a839-83beceaace4b"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + if (revision != null) + { + queryParams.Add("revision", revision.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets the tags for a definition. + /// + /// Project ID + /// The ID of the definition. + /// The definition revision number. If not specified, uses the latest revision of the definition. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDefinitionTagsAsync( + Guid project, + int definitionId, + int? revision = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("cb894432-134a-4d31-a839-83beceaace4b"); + object routeValues = new { project = project, definitionId = definitionId }; + + List> queryParams = new List>(); + if (revision != null) + { + queryParams.Add("revision", revision.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of all build and definition tags in the project. + /// + /// Project ID or project name + /// + /// The cancellation token to cancel operation. + public virtual Task> GetTagsAsync( + string project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d84ac5c6-edc7-43d5-adc9-1b34be5dea09"); + object routeValues = new { project = project }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of all build and definition tags in the project. + /// + /// Project ID + /// + /// The cancellation token to cancel operation. + public virtual Task> GetTagsAsync( + Guid project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d84ac5c6-edc7-43d5-adc9-1b34be5dea09"); + object routeValues = new { project = project }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Deletes a build definition template. + /// + /// Project ID or project name + /// The ID of the template. + /// + /// The cancellation token to cancel operation. 
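+ // Illustrative sketch with the same hypothetical "client": deleting a definition template by its string id.
+ //     await client.DeleteTemplateAsync("MyProject", "myCustomTemplateId");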
+ public virtual async Task DeleteTemplateAsync( + string project, + string templateId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("e884571e-7f92-4d6a-9274-3f5649900835"); + object routeValues = new { project = project, templateId = templateId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 3), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Deletes a build definition template. + /// + /// Project ID + /// The ID of the template. + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteTemplateAsync( + Guid project, + string templateId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("e884571e-7f92-4d6a-9274-3f5649900835"); + object routeValues = new { project = project, templateId = templateId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 3), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Gets a specific build definition template. + /// + /// Project ID or project name + /// The ID of the requested template. + /// + /// The cancellation token to cancel operation. + public virtual Task GetTemplateAsync( + string project, + string templateId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("e884571e-7f92-4d6a-9274-3f5649900835"); + object routeValues = new { project = project, templateId = templateId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 3), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a specific build definition template. + /// + /// Project ID + /// The ID of the requested template. + /// + /// The cancellation token to cancel operation. + public virtual Task GetTemplateAsync( + Guid project, + string templateId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("e884571e-7f92-4d6a-9274-3f5649900835"); + object routeValues = new { project = project, templateId = templateId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 3), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets all definition templates. + /// + /// Project ID or project name + /// + /// The cancellation token to cancel operation. 
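+ // Illustrative sketch with the same hypothetical "client": enumerating all definition templates in a project.
+ //     var templates = await client.GetTemplatesAsync("MyProject");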
+ public virtual Task> GetTemplatesAsync( + string project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("e884571e-7f92-4d6a-9274-3f5649900835"); + object routeValues = new { project = project }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 3), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets all definition templates. + /// + /// Project ID + /// + /// The cancellation token to cancel operation. + public virtual Task> GetTemplatesAsync( + Guid project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("e884571e-7f92-4d6a-9274-3f5649900835"); + object routeValues = new { project = project }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 3), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Updates an existing build definition template. + /// + /// The new version of the template. + /// Project ID or project name + /// The ID of the template. + /// + /// The cancellation token to cancel operation. + public virtual Task SaveTemplateAsync( + BuildDefinitionTemplate template, + string project, + string templateId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("e884571e-7f92-4d6a-9274-3f5649900835"); + object routeValues = new { project = project, templateId = templateId }; + HttpContent content = new ObjectContent(template, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 3), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Updates an existing build definition template. + /// + /// The new version of the template. + /// Project ID + /// The ID of the template. + /// + /// The cancellation token to cancel operation. + public virtual Task SaveTemplateAsync( + BuildDefinitionTemplate template, + Guid project, + string templateId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("e884571e-7f92-4d6a-9274-3f5649900835"); + object routeValues = new { project = project, templateId = templateId }; + HttpContent content = new ObjectContent(template, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 3), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Gets details for a build + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetBuildTimelineAsync( + string project, + int buildId, + Guid? timelineId = null, + int? changeId = null, + Guid? 
planId = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("8baac422-4c6e-4de5-8532-db96d92acffa"); + object routeValues = new { project = project, buildId = buildId, timelineId = timelineId }; + + List> queryParams = new List>(); + if (changeId != null) + { + queryParams.Add("changeId", changeId.Value.ToString(CultureInfo.InvariantCulture)); + } + if (planId != null) + { + queryParams.Add("planId", planId.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets details for a build + /// + /// Project ID + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetBuildTimelineAsync( + Guid project, + int buildId, + Guid? timelineId = null, + int? changeId = null, + Guid? planId = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("8baac422-4c6e-4de5-8532-db96d92acffa"); + object routeValues = new { project = project, buildId = buildId, timelineId = timelineId }; + + List> queryParams = new List>(); + if (changeId != null) + { + queryParams.Add("changeId", changeId.Value.ToString(CultureInfo.InvariantCulture)); + } + if (planId != null) + { + queryParams.Add("planId", planId.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Recreates the webhooks for the specified triggers in the given source code repository. + /// + /// The types of triggers to restore webhooks for. + /// Project ID or project name + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// If specified, the vendor-specific identifier or the name of the repository to get webhooks. Can only be omitted for providers that do not support multiple repositories. + /// + /// The cancellation token to cancel operation. + public virtual async Task RestoreWebhooksAsync( + List triggerTypes, + string project, + string providerName, + Guid? 
serviceEndpointId = null, + string repository = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("793bceb8-9736-4030-bd2f-fb3ce6d6b478"); + object routeValues = new { project = project, providerName = providerName }; + HttpContent content = new ObjectContent>(triggerTypes, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Recreates the webhooks for the specified triggers in the given source code repository. + /// + /// The types of triggers to restore webhooks for. + /// Project ID + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// If specified, the vendor-specific identifier or the name of the repository to get webhooks. Can only be omitted for providers that do not support multiple repositories. + /// + /// The cancellation token to cancel operation. + public virtual async Task RestoreWebhooksAsync( + List triggerTypes, + Guid project, + string providerName, + Guid? serviceEndpointId = null, + string repository = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("793bceb8-9736-4030-bd2f-fb3ce6d6b478"); + object routeValues = new { project = project, providerName = providerName }; + HttpContent content = new ObjectContent>(triggerTypes, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Gets a list of webhooks installed in the given source code repository. + /// + /// Project ID or project name + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// If specified, the vendor-specific identifier or the name of the repository to get webhooks. Can only be omitted for providers that do not support multiple repositories. + /// + /// The cancellation token to cancel operation. + public virtual Task> ListWebhooksAsync( + string project, + string providerName, + Guid? 
serviceEndpointId = null, + string repository = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("8f20ff82-9498-4812-9f6e-9c01bdc50e99"); + object routeValues = new { project = project, providerName = providerName }; + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Gets a list of webhooks installed in the given source code repository. + /// + /// Project ID + /// The name of the source provider. + /// If specified, the ID of the service endpoint to query. Can only be omitted for providers that do not use service endpoints, e.g. TFVC or TFGit. + /// If specified, the vendor-specific identifier or the name of the repository to get webhooks. Can only be omitted for providers that do not support multiple repositories. + /// + /// The cancellation token to cancel operation. + public virtual Task> ListWebhooksAsync( + Guid project, + string providerName, + Guid? serviceEndpointId = null, + string repository = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("8f20ff82-9498-4812-9f6e-9c01bdc50e99"); + object routeValues = new { project = project, providerName = providerName }; + + List> queryParams = new List>(); + if (serviceEndpointId != null) + { + queryParams.Add("serviceEndpointId", serviceEndpointId.Value.ToString()); + } + if (repository != null) + { + queryParams.Add("repository", repository); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.2, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/IVariableMultiplierExecutionOptions.cs b/src/Sdk/BuildWebApi/Api/IVariableMultiplierExecutionOptions.cs new file mode 100644 index 00000000000..4aa4e41f6d8 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/IVariableMultiplierExecutionOptions.cs @@ -0,0 +1,24 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace GitHub.Build.WebApi +{ + public interface IVariableMultiplierExecutionOptions + { + Int32 MaxConcurrency + { + get; + } + + Boolean ContinueOnError + { + get; + } + + List Multipliers + { + get; + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/Links.cs b/src/Sdk/BuildWebApi/Api/Links.cs new file mode 100644 index 00000000000..428ca001954 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Links.cs @@ -0,0 +1,18 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [GenerateAllConstants] + public static class Links + { + public const string Avatar = "avatar"; + public const String Self = "self"; + public const String Web = "web"; + public const String Editor = "editor"; + public const String Badge = "badge"; + public const String Timeline = "timeline"; + public const String Details = "details"; + public const String SourceVersionDisplayUri = "sourceVersionDisplayUri"; + } +} diff --git 
a/src/Sdk/BuildWebApi/Api/MetricAggregationTypes.cs b/src/Sdk/BuildWebApi/Api/MetricAggregationTypes.cs new file mode 100644 index 00000000000..8269b10a8f7 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/MetricAggregationTypes.cs @@ -0,0 +1,19 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [GenerateAllConstants] + public static class MetricAggregationTypes + { + public const String Hourly = "Hourly"; + public const String Daily = "Daily"; + } + + [Obsolete("Use MetricAggregationTypes instead.")] + public static class WellKnownMetricAggregationTypes + { + public const String Hourly = MetricAggregationTypes.Hourly; + public const String Daily = MetricAggregationTypes.Daily; + } +} diff --git a/src/Sdk/BuildWebApi/Api/PhaseTargetType.cs b/src/Sdk/BuildWebApi/Api/PhaseTargetType.cs new file mode 100644 index 00000000000..126026ece3c --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/PhaseTargetType.cs @@ -0,0 +1,12 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [GenerateAllConstants] + public static class PhaseTargetType + { + public const Int32 Agent = 1; + public const Int32 Server = 2; + } +} diff --git a/src/Sdk/BuildWebApi/Api/ProcessType.cs b/src/Sdk/BuildWebApi/Api/ProcessType.cs new file mode 100644 index 00000000000..3b91987c64c --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/ProcessType.cs @@ -0,0 +1,29 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [GenerateAllConstants] + public static class ProcessType + { + public const Int32 Designer = 1; + public const Int32 Yaml = 2; + public const Int32 Docker = 3; + public const Int32 JustInTime = 4; + + public static String GetName(Int32 type) + { + switch (type) + { + case ProcessType.Docker: + return nameof(Docker); + case ProcessType.JustInTime: + return nameof(JustInTime); + case ProcessType.Yaml: + return nameof(Yaml); + default: + return nameof(Designer); + } + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/ReportTypes.cs b/src/Sdk/BuildWebApi/Api/ReportTypes.cs new file mode 100644 index 00000000000..983cf2565d1 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/ReportTypes.cs @@ -0,0 +1,15 @@ +using System; + +namespace GitHub.Build.WebApi +{ + public static class ReportTypes + { + public const String Html = "Html"; + } + + [Obsolete("Use ReportTypes instead.")] + public static class WellKnownReportTypes + { + public const String Html = ReportTypes.Html; + } +} diff --git a/src/Sdk/BuildWebApi/Api/RepositoryProperties.cs b/src/Sdk/BuildWebApi/Api/RepositoryProperties.cs new file mode 100644 index 00000000000..ef82fe4c100 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/RepositoryProperties.cs @@ -0,0 +1,72 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [Obsolete("Use RepositoryProperties instead.")] + public static class WellKnownRepositoryProperties + { + public const String ApiUrl = RepositoryProperties.ApiUrl; + public const String BranchesUrl = RepositoryProperties.BranchesUrl; + public const String CheckoutNestedSubmodules = RepositoryProperties.CheckoutNestedSubmodules; + public const String CleanOptions = RepositoryProperties.CleanOptions; + public const String CloneUrl = RepositoryProperties.CloneUrl; + public const String ConnectedServiceId = RepositoryProperties.ConnectedServiceId; + public const String FetchDepth = RepositoryProperties.FetchDepth; + public const String Fullname = RepositoryProperties.Fullname; + public const String GitLfsSupport = RepositoryProperties.GitLfsSupport; + public 
const String LabelSources = RepositoryProperties.LabelSources; + public const String LabelSourcesFormat = RepositoryProperties.LabelSourcesFormat; + public const String Password = RepositoryProperties.Password; + public const String SkipSyncSource = RepositoryProperties.SkipSyncSource; + public const String SvnMapping = RepositoryProperties.SvnMapping; + public const String TfvcMapping = RepositoryProperties.TfvcMapping; + public const String TokenType = RepositoryProperties.TokenType; + public const String Username = RepositoryProperties.Username; + public const String ReportBuildStatus = RepositoryProperties.ReportBuildStatus; + public const String AcceptUntrustedCertificates = RepositoryProperties.AcceptUntrustedCertificates; + } + + [GenerateAllConstants] + public static class RepositoryProperties + { + public const String AcceptUntrustedCertificates = "acceptUntrustedCerts"; + public const String ApiUrl = "apiUrl"; + public const String Archived = "archived"; + public const String BranchesUrl = "branchesUrl"; + public const String CheckoutNestedSubmodules = "checkoutNestedSubmodules"; + public const String CleanOptions = "cleanOptions"; + public const String CloneUrl = "cloneUrl"; + public const String ConnectedServiceId = "connectedServiceId"; + public const String DefaultBranch = "defaultBranch"; + public const String ExternalId = "externalId"; + public const String FetchDepth = "fetchDepth"; + public const String Fullname = "fullName"; + public const String GitLfsSupport = "gitLfsSupport"; + public const String HasAdminPermissions = "hasAdminPermissions"; + public const String IsFork = "isFork"; + public const String IsPrivate = "isPrivate"; + public const String LabelSources = "labelSources"; + public const String LabelSourcesFormat = "labelSourcesFormat"; + public const String Languages = "languages"; + public const String LastUpdated = "lastUpdated"; + public const String ManageUrl = "manageUrl"; + public const String NodeId = "nodeId"; + public const String OwnerAvatarUrl = "ownerAvatarUrl"; + public const String OwnerId = "ownerId"; + public const String OwnerIsAUser = "ownerIsAUser"; + public const String OrgName = "orgName"; + public const String Password = "password"; + public const String PrimaryLanguage = "primaryLanguage"; + public const String RefsUrl = "refsUrl"; + public const String ReportBuildStatus = "reportBuildStatus"; + public const String SafeId = "safeId"; // Used in telemetry, so sensitive information removed (may be a url w/ password) + public const String SafeRepository = "safeRepository"; // Used in telemetry, so sensitive information removed + public const String ShortName = "shortName"; + public const String SkipSyncSource = "skipSyncSource"; + public const String SvnMapping = "svnMapping"; + public const String TfvcMapping = "tfvcMapping"; + public const String TokenType = "tokenType"; + public const String Username = "username"; + } +} diff --git a/src/Sdk/BuildWebApi/Api/RepositoryTypes.cs b/src/Sdk/BuildWebApi/Api/RepositoryTypes.cs new file mode 100644 index 00000000000..df1c868effa --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/RepositoryTypes.cs @@ -0,0 +1,29 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [GenerateAllConstants] + public static class RepositoryTypes + { + public const String TfsVersionControl = "TfsVersionControl"; + public const String TfsGit = "TfsGit"; + public const String Git = "Git"; + public const String GitHub = "GitHub"; + public const String GitHubEnterprise = "GitHubEnterprise"; + 
public const String Bitbucket = "Bitbucket"; + public const String Svn = "Svn"; + } + + [Obsolete("Use RepositoryTypes instead.")] + public static class WellKnownRepositoryTypes + { + public const String TfsVersionControl = RepositoryTypes.TfsVersionControl; + public const String TfsGit = RepositoryTypes.TfsGit; + public const String Git = RepositoryTypes.Git; + public const String GitHub = RepositoryTypes.GitHub; + public const String GitHubEnterprise = RepositoryTypes.GitHubEnterprise; + public const String Bitbucket = RepositoryTypes.Bitbucket; + public const String Svn = RepositoryTypes.Svn; + } +} diff --git a/src/Sdk/BuildWebApi/Api/Security.cs b/src/Sdk/BuildWebApi/Api/Security.cs new file mode 100644 index 00000000000..4f1d3c65147 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/Security.cs @@ -0,0 +1,55 @@ +using System; +using System.ComponentModel; +using System.Text; + +namespace GitHub.Build.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class Security + { + /// + /// Gets tokenized path from the given path to fit in to build hierarchical security + /// + /// + /// + public static String GetSecurityTokenPath(String path) + { + if (String.IsNullOrEmpty(path)) + { + // return root path by default + return NamespaceSeparator.ToString(); + } + + String[] components = path.Split(new char[] { '\\', '/' }, StringSplitOptions.RemoveEmptyEntries); + if (components.Length == 0) + { + // for root path + return NamespaceSeparator.ToString(); + } + + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < components.Length; i++) + { +#if !NETSTANDARD + // FileSpec isn't available in NetStandard + String error; + if (!FileSpec.IsLegalNtfsName(components[i], MaxPathNameLength, true, out error)) + { + throw new InvalidPathException(error); + } +#endif + + sb.AppendFormat("{0}{1}", NamespaceSeparator, components[i]); + } + + sb.Append(NamespaceSeparator); + return sb.ToString(); + } + + public static readonly Char NamespaceSeparator = '/'; + public static readonly Int32 MaxPathNameLength = 248; + + public const String BuildNamespaceIdString = "33344D9C-FC72-4d6f-ABA5-FA317101A7E9"; + public static readonly Guid BuildNamespaceId = new Guid(BuildNamespaceIdString); + } +} diff --git a/src/Sdk/BuildWebApi/Api/SerializationHelper.cs b/src/Sdk/BuildWebApi/Api/SerializationHelper.cs new file mode 100644 index 00000000000..5185bd76c19 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/SerializationHelper.cs @@ -0,0 +1,39 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.Build.WebApi +{ + internal static class SerializationHelper + { + public static void Copy( + ref List source, + ref List target, + Boolean clearSource = false) + { + if (source != null && source.Count > 0) + { + target = new List(source); + if (clearSource) + { + source = null; + } + } + } + + public static void Copy( + ref IDictionary source, + ref IDictionary target, + IEqualityComparer comparer, + Boolean clearSource = false) + { + if (source != null && source.Count > 0) + { + target = new Dictionary(source, comparer); + if (clearSource) + { + source = null; + } + } + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/ServerTargetExecutionType.cs b/src/Sdk/BuildWebApi/Api/ServerTargetExecutionType.cs new file mode 100644 index 00000000000..d232504669d --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/ServerTargetExecutionType.cs @@ -0,0 +1,11 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi { + + [GenerateAllConstants] + public static class ServerTargetExecutionType 
{ + public const Int32 Normal = 0; + public const Int32 VariableMultipliers = 1; + } +} diff --git a/src/Sdk/BuildWebApi/Api/SettingsSourceType.cs b/src/Sdk/BuildWebApi/Api/SettingsSourceType.cs new file mode 100644 index 00000000000..6d70c0b1c19 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/SettingsSourceType.cs @@ -0,0 +1,14 @@ +using System; +using System.ComponentModel; +using GitHub.Services.Common; + +namespace GitHub.Build.WebApi +{ + [GenerateAllConstants] + [EditorBrowsable(EditorBrowsableState.Never)] + public static class SettingsSourceType + { + public const Int32 Definition = 1; + public const Int32 Process = 2; + } +} diff --git a/src/Sdk/BuildWebApi/Api/TypePropertyJsonConverter.cs b/src/Sdk/BuildWebApi/Api/TypePropertyJsonConverter.cs new file mode 100644 index 00000000000..8a10d1172dc --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/TypePropertyJsonConverter.cs @@ -0,0 +1,114 @@ +using System; +using System.Reflection; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json.Serialization; + +namespace GitHub.Build.WebApi +{ + internal abstract class TypePropertyJsonConverter : VssSecureJsonConverter where TInstance : class + { + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + var contract = serializer.ContractResolver.ResolveContract(objectType) as JsonObjectContract; + if (contract == null) + { + return existingValue; + } + + var property = contract.Properties.GetClosestMatchProperty("Type"); + if (property == null) + { + return existingValue; + } + + Int32 targetType; + JToken targetTypeValue; + var value = JObject.Load(reader); + + TInstance newValue = GetInstance(objectType); + if (newValue == null) + { + if (!value.TryGetValue(property.PropertyName, StringComparison.OrdinalIgnoreCase, out targetTypeValue)) + { + if (!TryInferType(value, out targetType)) + { + return existingValue; + } + } + else + { + if (targetTypeValue.Type != JTokenType.Integer) + { + return existingValue; + } + else + { + targetType = (Int32)targetTypeValue; + } + } + + newValue = GetInstance(targetType); + } + + if (value != null) + { + using (JsonReader objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, newValue); + } + } + + return newValue; + } + + protected abstract TInstance GetInstance(Int32 targetType); + + protected virtual TInstance GetInstance( + Type objectType) + { + return null; + } + + protected virtual Boolean TryInferType( + JObject value, + out Int32 type) + { + type = 0; + return false; + } + + public override Boolean CanConvert( + Type objectType) + { + return typeof(TInstance).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + // The virtual method returns false for CanWrite so this should never be invoked + throw new NotSupportedException(); + } + } +} diff --git a/src/Sdk/BuildWebApi/Api/WellKnownBuildOptions.cs b/src/Sdk/BuildWebApi/Api/WellKnownBuildOptions.cs new file mode 100644 index 00000000000..039eeb0e3e9 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/WellKnownBuildOptions.cs @@ -0,0 +1,11 @@ +using System; + +namespace GitHub.Build.WebApi +{ + [Obsolete("No longer used.")] + public static class WellKnownBuildOptions + { + public static readonly Guid 
CreateDrop = Guid.Parse("{E8B30F6F-039D-4D34-969C-449BBE9C3B9E}"); + public static readonly Guid CopyToStagingFolder = Guid.Parse("{82F9A3E8-3930-482E-AC62-AE3276F284D5}"); + } +} diff --git a/src/Sdk/BuildWebApi/Api/WellKnownDataProviderKeys.cs b/src/Sdk/BuildWebApi/Api/WellKnownDataProviderKeys.cs new file mode 100644 index 00000000000..bdd412b5161 --- /dev/null +++ b/src/Sdk/BuildWebApi/Api/WellKnownDataProviderKeys.cs @@ -0,0 +1,44 @@ +using System; +using System.ComponentModel; + +namespace GitHub.Build.WebApi +{ + // moved to WebAccess/Build.Plugins + [Obsolete] + [EditorBrowsable(EditorBrowsableState.Never)] + public static class WellKnownDataProviderKeys + { + // Extensions + public const String MyDefinitions = "TFS.Build.MyDefinitions"; + public const String AllDefinitions = "TFS.Build.AllDefinitions"; + public const String QueuedDefinitions = "TFS.Build.QueuedDefinitions"; + public const String AllBuilds = "TFS.Build.AllBuilds"; + public const String DefinitionSummary = "TFS.Build.DefinitionSummary"; + public const String DefinitionHistory = "TFS.Build.DefinitionHistory"; + public const String DefinitionDeletedHistory = "TFS.Build.DefinitionDeletedHistory"; + + // Resources + public const String Builds = "TFS.Build.Builds"; + public const String Changes = "TFS.Build.Changes"; + public const String Definitions = "TFS.Build.Definitions"; + public const String Folders = "TFS.Build.Folders"; + public const String Queues = "TFS.Build.Queues"; + + // Resources grouped together + public const String BuildHistory = "TFS.Build.BuildHistory"; + + // Settings + public const String NewCIWorkflowOptInState = "TFS.Build.NewCIWorkflowOptInState"; + public const String NewCIWorkflowPreviewFeatureState = "TFS.Build.NewCIWorkflowPreviewFeatureState"; + + // Others + public const String AllDefinitionIds = "TFS.Build.AllDefinitions.DefinitionIds"; + public const String BuildIds = "TFS.Build.Mine.BuildIds"; + public const String HasMyBuilds = "TFS.Build.Mine.HasMyBuilds"; + public const String MyFavoriteDefinitionIds = "TFS.Build.MyFavoriteDefinitionIds"; + public const String TeamFavoriteDefinitionIds = "TFS.Build.TeamFavoriteDefinitionIds"; + + public const String BuildsContinuationToken = "TFS.Build.Builds.ContinuationToken"; + public const String DefinitionsContinuationToken = "TFS.Build.Definitions.ContinuationToken"; + } +} diff --git a/src/Sdk/Common/Common/Authentication/FederatedCredential.cs b/src/Sdk/Common/Common/Authentication/FederatedCredential.cs new file mode 100644 index 00000000000..2a03c63bbfb --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/FederatedCredential.cs @@ -0,0 +1,35 @@ +using System; +using System.Linq; +using System.Net; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.Common +{ + /// + /// Provides a common implementation for federated credentials. 
+ /// + [Serializable] + public abstract class FederatedCredential : IssuedTokenCredential + { + protected FederatedCredential(IssuedToken initialToken) + : base(initialToken) + { + } + + public override bool IsAuthenticationChallenge(IHttpResponse webResponse) + { + if (webResponse == null) + { + return false; + } + + if (webResponse.StatusCode == HttpStatusCode.Found || + webResponse.StatusCode == HttpStatusCode.Redirect) + { + return webResponse.Headers.GetValues(HttpHeaders.TfsFedAuthRealm).Any(); + } + + return webResponse.StatusCode == HttpStatusCode.Unauthorized; + } + } +} diff --git a/src/Sdk/Common/Common/Authentication/HttpRequestMessageWrapper.cs b/src/Sdk/Common/Common/Authentication/HttpRequestMessageWrapper.cs new file mode 100644 index 00000000000..ea0a2531356 --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/HttpRequestMessageWrapper.cs @@ -0,0 +1,66 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; + +namespace GitHub.Services.Common +{ + internal struct HttpRequestMessageWrapper : IHttpRequest, IHttpHeaders + { + public HttpRequestMessageWrapper(HttpRequestMessage request) + { + m_request = request; + } + + public IHttpHeaders Headers + { + get + { + return this; + } + } + + public Uri RequestUri + { + get + { + return m_request.RequestUri; + } + } + + public IDictionary Properties + { + get + { + return m_request.Properties; + } + } + + IEnumerable IHttpHeaders.GetValues(String name) + { + IEnumerable values; + if (!m_request.Headers.TryGetValues(name, out values)) + { + values = Enumerable.Empty(); + } + return values; + } + + void IHttpHeaders.SetValue( + String name, + String value) + { + m_request.Headers.Remove(name); + m_request.Headers.Add(name, value); + } + + Boolean IHttpHeaders.TryGetValues( + String name, + out IEnumerable values) + { + return m_request.Headers.TryGetValues(name, out values); + } + + private readonly HttpRequestMessage m_request; + } +} diff --git a/src/Sdk/Common/Common/Authentication/HttpResponseMessageWrapper.cs b/src/Sdk/Common/Common/Authentication/HttpResponseMessageWrapper.cs new file mode 100644 index 00000000000..18e03b5b8db --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/HttpResponseMessageWrapper.cs @@ -0,0 +1,58 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http; + +namespace GitHub.Services.Common +{ + internal struct HttpResponseMessageWrapper : IHttpResponse, IHttpHeaders + { + public HttpResponseMessageWrapper(HttpResponseMessage response) + { + m_response = response; + } + + public IHttpHeaders Headers + { + get + { + return this; + } + } + + public HttpStatusCode StatusCode + { + get + { + return m_response.StatusCode; + } + } + + IEnumerable IHttpHeaders.GetValues(String name) + { + IEnumerable values; + if (!m_response.Headers.TryGetValues(name, out values)) + { + values = Enumerable.Empty(); + } + return values; + } + + void IHttpHeaders.SetValue( + String name, + String value) + { + throw new NotSupportedException(); + } + + Boolean IHttpHeaders.TryGetValues( + String name, + out IEnumerable values) + { + return m_response.Headers.TryGetValues(name, out values); + } + + private readonly HttpResponseMessage m_response; + } +} diff --git a/src/Sdk/Common/Common/Authentication/IHttpHeaders.cs b/src/Sdk/Common/Common/Authentication/IHttpHeaders.cs new file mode 100644 index 00000000000..8147c75fe1c --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/IHttpHeaders.cs @@ -0,0 +1,14 @@ +using 
System; +using System.Collections.Generic; + +namespace GitHub.Services.Common +{ + public interface IHttpHeaders + { + IEnumerable GetValues(String name); + + void SetValue(String name, String value); + + Boolean TryGetValues(String name, out IEnumerable values); + } +} diff --git a/src/Sdk/Common/Common/Authentication/IHttpRequest.cs b/src/Sdk/Common/Common/Authentication/IHttpRequest.cs new file mode 100644 index 00000000000..802d7275d7c --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/IHttpRequest.cs @@ -0,0 +1,23 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.Services.Common +{ + public interface IHttpRequest + { + IHttpHeaders Headers + { + get; + } + + Uri RequestUri + { + get; + } + + IDictionary Properties + { + get; + } + } +} diff --git a/src/Sdk/Common/Common/Authentication/IHttpResponse.cs b/src/Sdk/Common/Common/Authentication/IHttpResponse.cs new file mode 100644 index 00000000000..7bae008632c --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/IHttpResponse.cs @@ -0,0 +1,17 @@ +using System.Net; + +namespace GitHub.Services.Common +{ + public interface IHttpResponse + { + IHttpHeaders Headers + { + get; + } + + HttpStatusCode StatusCode + { + get; + } + } +} diff --git a/src/Sdk/Common/Common/Authentication/IVssCredentialPrompt.cs b/src/Sdk/Common/Common/Authentication/IVssCredentialPrompt.cs new file mode 100644 index 00000000000..e9399225091 --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/IVssCredentialPrompt.cs @@ -0,0 +1,29 @@ +using System.Collections.Generic; +using System.Threading.Tasks; + +namespace GitHub.Services.Common +{ + /// + /// Provide an interface to get a new token for the credentials. + /// + public interface IVssCredentialPrompt + { + /// + /// Get a new token using the specified provider and the previously failed token. + /// + /// The provider for the token to be retrieved + /// The token which previously failed authentication, if available + /// The new token + Task GetTokenAsync(IssuedTokenProvider provider, IssuedToken failedToken); + + IDictionary Parameters { get; set; } + } + + public interface IVssCredentialPrompts : IVssCredentialPrompt + { + IVssCredentialPrompt FederatedPrompt + { + get; + } + } +} diff --git a/src/Sdk/Common/Common/Authentication/IVssCredentialStorage.cs b/src/Sdk/Common/Common/Authentication/IVssCredentialStorage.cs new file mode 100644 index 00000000000..7a1094637bd --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/IVssCredentialStorage.cs @@ -0,0 +1,23 @@ +using System; + +namespace GitHub.Services.Common +{ + public interface IVssCredentialStorage + { + IssuedToken RetrieveToken( + Uri serverUrl, + VssCredentialsType credentialsType); + + void StoreToken( + Uri serverUrl, + IssuedToken token); + + void RemoveToken( + Uri serverUrl, + IssuedToken token); + + bool RemoveTokenValue( + Uri serverUrl, + IssuedToken token); + } +} diff --git a/src/Sdk/Common/Common/Authentication/IssuedToken.cs b/src/Sdk/Common/Common/Authentication/IssuedToken.cs new file mode 100644 index 00000000000..23aa71676f5 --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/IssuedToken.cs @@ -0,0 +1,113 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.Common +{ + /// + /// Provides a common base class for issued tokens. 
+ /// + [Serializable] + public abstract class IssuedToken + { + internal IssuedToken() + { + } + + /// + /// Gets a value indicating whether or not this token has been successfully authenticated with the remote + /// server. + /// + public bool IsAuthenticated + { + get + { + return m_authenticated == 1; + } + } + + protected internal abstract VssCredentialsType CredentialType + { + get; + } + + /// + /// True if the token is retrieved from token storage. + /// + internal bool FromStorage + { + get; + set; + } + + /// + /// Metadata about the token in a collection of properties. + /// + /// + public IDictionary Properties + { + get; + set; + } + + /// + /// Id of the owner of the token. + /// + internal Guid UserId + { + get; + set; + } + + /// + /// Name of the owner of the token. + /// + internal string UserName + { + get; + set; + } + + /// + /// Invoked when the issued token has been validated by successfully authenticated with the remote server. + /// + internal bool Authenticated() + { + return Interlocked.CompareExchange(ref m_authenticated, 1, 0) == 0; + } + + /// + /// Get the value of the HttpHeaders.VssUserData response header and + /// populate the UserId and UserName properties. + /// + internal void GetUserData(IHttpResponse response) + { + IEnumerable headerValues; + if (response.Headers.TryGetValues(HttpHeaders.VssUserData, out headerValues)) + { + string userData = headerValues.FirstOrDefault(); + + if (!string.IsNullOrWhiteSpace(userData)) + { + string[] split = userData.Split(':'); + + if (split.Length >= 2) + { + UserId = Guid.Parse(split[0]); + UserName = split[1]; + } + } + } + } + + /// + /// Applies the token to the HTTP request message. + /// + /// The HTTP request message + internal abstract void ApplyTo(IHttpRequest request); + + private int m_authenticated; + } +} diff --git a/src/Sdk/Common/Common/Authentication/IssuedTokenCredential.cs b/src/Sdk/Common/Common/Authentication/IssuedTokenCredential.cs new file mode 100644 index 00000000000..1bed7d72002 --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/IssuedTokenCredential.cs @@ -0,0 +1,148 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; + +namespace GitHub.Services.Common +{ + /// + /// Provides a common base class for issued token credentials. + /// + [Serializable] + public abstract class IssuedTokenCredential + { + protected IssuedTokenCredential(IssuedToken initialToken) + { + InitialToken = initialToken; + } + + public abstract VssCredentialsType CredentialType + { + get; + } + + /// + /// The initial token to use to authenticate if available. + /// + internal IssuedToken InitialToken + { + get; + set; + } + + /// + /// Gets or sets the synchronization context which should be used for UI prompts. + /// + internal TaskScheduler Scheduler + { + get + { + return m_scheduler; + } + set + { + m_scheduler = value; + } + } + + /// + /// The credentials prompt which is used for retrieving a new token. + /// + internal IVssCredentialPrompt Prompt + { + get + { + return m_prompt; + } + set + { + m_prompt = value; + } + } + + internal IVssCredentialStorage Storage + { + get + { + return m_storage; + } + set + { + m_storage = value; + } + } + + /// + /// The base url for the vssconnection to be used in the token storage key. + /// + internal Uri TokenStorageUrl { get; set; } + + /// + /// Creates a token provider suitable for handling the challenge presented in the response. 
+ /// + /// The targeted server + /// The challenge response + /// The failed token + /// An issued token provider instance + internal IssuedTokenProvider CreateTokenProvider( + Uri serverUrl, + IHttpResponse response, + IssuedToken failedToken) + { + if (response != null && !IsAuthenticationChallenge(response)) + { + throw new InvalidOperationException(); + } + + if (InitialToken == null && Storage != null) + { + if (TokenStorageUrl == null) + { + throw new InvalidOperationException($"The {nameof(TokenStorageUrl)} property must have a value if the {nameof(Storage)} property is set on this instance of {GetType().Name}."); + } + InitialToken = Storage.RetrieveToken(TokenStorageUrl, CredentialType); + } + + IssuedTokenProvider provider = OnCreateTokenProvider(serverUrl, response); + if (provider != null) + { + provider.TokenStorageUrl = TokenStorageUrl; + } + + // If the initial token is the one which failed to authenticate, don't + // use it again and let the token provider get a new token. + if (provider != null) + { + if (InitialToken != null && !Object.ReferenceEquals(InitialToken, failedToken)) + { + provider.CurrentToken = InitialToken; + } + } + + return provider; + } + + internal virtual string GetAuthenticationChallenge(IHttpResponse webResponse) + { + IEnumerable values; + if (!webResponse.Headers.TryGetValues(Internal.HttpHeaders.WwwAuthenticate, out values)) + { + return String.Empty; + } + + return String.Join(", ", values); + } + + public abstract bool IsAuthenticationChallenge(IHttpResponse webResponse); + + protected abstract IssuedTokenProvider OnCreateTokenProvider(Uri serverUrl, IHttpResponse response); + + [NonSerialized] + private TaskScheduler m_scheduler; + + [NonSerialized] + private IVssCredentialPrompt m_prompt; + + [NonSerialized] + private IVssCredentialStorage m_storage; + } +} diff --git a/src/Sdk/Common/Common/Authentication/IssuedTokenProvider.cs b/src/Sdk/Common/Common/Authentication/IssuedTokenProvider.cs new file mode 100644 index 00000000000..550bbcb5c5e --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/IssuedTokenProvider.cs @@ -0,0 +1,545 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Globalization; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common.Diagnostics; + +namespace GitHub.Services.Common +{ + internal interface ISupportSignOut + { + void SignOut(Uri serverUrl, Uri replyToUrl, string identityProvider); + } + + /// + /// Provides a common base class for providers of the token authentication model. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class IssuedTokenProvider + { + private const double c_slowTokenAcquisitionTimeInSeconds = 2.0; + + protected IssuedTokenProvider( + IssuedTokenCredential credential, + Uri serverUrl, + Uri signInUrl) + { + ArgumentUtility.CheckForNull(credential, "credential"); + + this.SignInUrl = signInUrl; + this.Credential = credential; + this.ServerUrl = serverUrl; + + m_thisLock = new object(); + } + + /// + /// Gets the authentication scheme used to create this token provider. + /// + protected virtual String AuthenticationScheme + { + get + { + return String.Empty; + } + } + + /// + /// Gets the authentication parameter or parameters used to create this token provider. + /// + protected virtual String AuthenticationParameter + { + get + { + return String.Empty; + } + } + + /// + /// Gets the credential associated with the provider. 
+ /// + protected internal IssuedTokenCredential Credential + { + get; + } + + internal VssCredentialsType CredentialType => this.Credential.CredentialType; + + /// + /// Gets the current token. + /// + public IssuedToken CurrentToken + { + get; + internal set; + } + + /// + /// Gets a value indicating whether or not a call to get token will require interactivity. + /// + public abstract bool GetTokenIsInteractive + { + get; + } + + /// + /// Gets a value indicating whether or not an ISynchronizeInvoke call is required. + /// + private Boolean InvokeRequired + { + get + { + return this.GetTokenIsInteractive && this.Credential.Scheduler != null; + } + } + + /// + /// Gets the sign-in URL for the token provider. + /// + public Uri SignInUrl { get; private set; } + + protected Uri ServerUrl { get; } + + /// + /// The base url for the vssconnection to be used in the token storage key. + /// + internal Uri TokenStorageUrl { get; set; } + + /// + /// Determines whether the specified web response is an authentication challenge. + /// + /// The web response + /// True if the web response is a challenge for token authentication; otherwise, false + protected internal virtual bool IsAuthenticationChallenge(IHttpResponse webResponse) + { + return this.Credential.IsAuthenticationChallenge(webResponse); + } + + /// + /// Formats the authentication challenge string which this token provider handles. + /// + /// A string representing the handled authentication challenge + internal string GetAuthenticationParameters() + { + if (string.IsNullOrEmpty(this.AuthenticationParameter)) + { + return this.AuthenticationScheme; + } + else + { + return string.Format(CultureInfo.InvariantCulture, this.AuthenticationScheme, this.AuthenticationParameter); + } + } + + /// + /// Validates the current token if the provided reference is the current token and it + /// has not been validated before. + /// + /// The token which should be validated + /// The web response which used the token + internal void ValidateToken( + IssuedToken token, + IHttpResponse webResponse) + { + if (token == null) + { + return; + } + + lock (m_thisLock) + { + IssuedToken tokenToValidate = OnValidatingToken(token, webResponse); + + if (tokenToValidate.IsAuthenticated) + { + return; + } + + try + { + // Perform validation which may include matching user information from the response + // with that from the stored connection. If user information mismatch, an exception + // will be thrown and the token will not be authenticated, which means if the same + // token is ever used again in a different request it will be revalidated and fail. + tokenToValidate.GetUserData(webResponse); + OnTokenValidated(tokenToValidate); + + // Set the token to be authenticated. + tokenToValidate.Authenticated(); + } + finally + { + // When the token fails validation, we null its reference from the token provider so it + // would not be used again by the consumers of both. Note that we only update the current + // token of the provider if it is the original token being validated, because we do not + // want to overwrite a different token. + if (object.ReferenceEquals(this.CurrentToken, token)) + { + this.CurrentToken = tokenToValidate.IsAuthenticated ? tokenToValidate : null; + } + } + } + } + + /// + /// Invalidates the current token if the provided reference is the current token. 
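The InvokeRequired and Scheduler pairing above is what routes an interactive token prompt onto a caller-supplied TaskScheduler instead of the current thread. A minimal sketch of that dispatch pattern, with a placeholder delegate standing in for a real credential prompt and TaskScheduler.Default standing in for a UI-bound scheduler:

using System;
using System.Threading;
using System.Threading.Tasks;

class PromptSchedulingSketch
{
    static async Task Main()
    {
        // A UI host would supply a scheduler tied to its synchronization context.
        TaskScheduler scheduler = TaskScheduler.Default;

        string token = await Task.Factory.StartNew(
            () => "token-from-prompt",          // placeholder for the prompt callback
            CancellationToken.None,
            TaskCreationOptions.None,
            scheduler);

        Console.WriteLine(token);
    }
}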
+ /// + /// The token reference which should be invalidated + internal void InvalidateToken(IssuedToken token) + { + bool invalidated = false; + lock (m_thisLock) + { + if (token != null && object.ReferenceEquals(this.CurrentToken, token)) + { + this.CurrentToken = null; + invalidated = true; + } + } + + if (invalidated) + { + OnTokenInvalidated(token); + } + } + + /// + /// Retrieves a token for the credentials. + /// + /// The token which previously failed authentication, if available + /// The CancellationTokenthat will be assigned to the new task + /// A security token for the current credentials + public async Task GetTokenAsync( + IssuedToken failedToken, + CancellationToken cancellationToken) + { + IssuedToken currentToken = this.CurrentToken; + VssTraceActivity traceActivity = VssTraceActivity.Current; + Stopwatch aadAuthTokenTimer = Stopwatch.StartNew(); + try + { + VssHttpEventSource.Log.AuthenticationStart(traceActivity); + + if (currentToken != null) + { + VssHttpEventSource.Log.IssuedTokenRetrievedFromCache(traceActivity, this, currentToken); + return currentToken; + } + else + { + GetTokenOperation operation = null; + try + { + GetTokenOperation operationInProgress; + operation = CreateOperation(traceActivity, failedToken, cancellationToken, out operationInProgress); + if (operationInProgress == null) + { + return await operation.GetTokenAsync(traceActivity).ConfigureAwait(false); + } + else + { + return await operationInProgress.WaitForTokenAsync(traceActivity, cancellationToken).ConfigureAwait(false); + } + } + finally + { + lock (m_thisLock) + { + m_operations.Remove(operation); + } + + operation?.Dispose(); + } + } + } + finally + { + VssHttpEventSource.Log.AuthenticationStop(traceActivity); + + aadAuthTokenTimer.Stop(); + TimeSpan getTokenTime = aadAuthTokenTimer.Elapsed; + + if(getTokenTime.TotalSeconds >= c_slowTokenAcquisitionTimeInSeconds) + { + // It may seem strange to pass the string value of TotalSeconds into this method, but testing + // showed that ETW is persnickety when you register a method in an EventSource that doesn't + // use strings or integers as its parameters. It is easier to simply give the method a string + // than figure out to get ETW to reliably accept a double or TimeSpan. + VssHttpEventSource.Log.AuthorizationDelayed(getTokenTime.TotalSeconds.ToString()); + } + } + } + + /// + /// Retrieves a token for the credentials. + /// + /// The token which previously failed authentication, if available + /// The CancellationTokenthat will be assigned to the new task + /// A security token for the current credentials + protected virtual Task OnGetTokenAsync( + IssuedToken failedToken, + CancellationToken cancellationToken) + { + if (this.Credential.Prompt != null) + { + return this.Credential.Prompt.GetTokenAsync(this, failedToken); + } + else + { + return Task.FromResult(null); + } + } + + /// + /// Invoked when the current token is being validated. When overriden in a derived class, + /// validate and return the validated token. + /// + /// Is called inside a lock in ValidateToken + /// The token to validate + /// The web response which used the token + /// The validated token + protected virtual IssuedToken OnValidatingToken( + IssuedToken token, + IHttpResponse webResponse) + { + return token; + } + + protected virtual void OnTokenValidated(IssuedToken token) + { + // Store the validated token to the token storage if it is not originally from there. 
+ if (!token.FromStorage && TokenStorageUrl != null) + { + Credential.Storage?.StoreToken(TokenStorageUrl, token); + } + + VssHttpEventSource.Log.IssuedTokenValidated(VssTraceActivity.Current, this, token); + } + + protected virtual void OnTokenInvalidated(IssuedToken token) + { + if (Credential.Storage != null && TokenStorageUrl != null) + { + Credential.Storage.RemoveTokenValue(TokenStorageUrl, token); + } + + VssHttpEventSource.Log.IssuedTokenInvalidated(VssTraceActivity.Current, this, token); + } + + private GetTokenOperation CreateOperation( + VssTraceActivity traceActivity, + IssuedToken failedToken, + CancellationToken cancellationToken, + out GetTokenOperation operationInProgress) + { + operationInProgress = null; + GetTokenOperation operation = null; + lock (m_thisLock) + { + if (m_operations == null) + { + m_operations = new List(); + } + + // Grab the main operation which is doing the work (if any) + if (m_operations.Count > 0) + { + operationInProgress = m_operations[0]; + + // Use the existing completion source when creating the new operation + operation = new GetTokenOperation(traceActivity, this, failedToken, cancellationToken, operationInProgress.CompletionSource); + } + else + { + operation = new GetTokenOperation(traceActivity, this, failedToken, cancellationToken); + } + + m_operations.Add(operation); + } + + return operation; + } + + private object m_thisLock; + private List m_operations; + + private class DisposableTaskCompletionSource : TaskCompletionSource, IDisposable + { + public DisposableTaskCompletionSource() + { + this.Task.ConfigureAwait(false).GetAwaiter().OnCompleted(() => { m_completed = true; }); + } + + ~DisposableTaskCompletionSource() + { + TraceErrorIfNotCompleted(); + } + + public void Dispose() + { + if (m_disposed) + { + return; + } + + TraceErrorIfNotCompleted(); + + m_disposed = true; + GC.SuppressFinalize(this); + } + + private void TraceErrorIfNotCompleted() + { + if (!m_completed) + { + VssHttpEventSource.Log.TokenSourceNotCompleted(); + } + } + + private Boolean m_disposed; + private Boolean m_completed; + } + + private sealed class GetTokenOperation : IDisposable + { + public GetTokenOperation( + VssTraceActivity activity, + IssuedTokenProvider provider, + IssuedToken failedToken, + CancellationToken cancellationToken) + : this(activity, provider, failedToken, cancellationToken, new DisposableTaskCompletionSource(), true) + { + } + + public GetTokenOperation( + VssTraceActivity activity, + IssuedTokenProvider provider, + IssuedToken failedToken, + CancellationToken cancellationToken, + DisposableTaskCompletionSource completionSource, + Boolean ownsCompletionSource = false) + { + this.Provider = provider; + this.ActivityId = activity?.Id ?? 
Guid.Empty; + this.FailedToken = failedToken; + this.CancellationToken = cancellationToken; + this.CompletionSource = completionSource; + this.OwnsCompletionSource = ownsCompletionSource; + } + + public Guid ActivityId { get; } + + public CancellationToken CancellationToken { get; } + + public DisposableTaskCompletionSource CompletionSource { get; } + + public Boolean OwnsCompletionSource { get; } + + private IssuedToken FailedToken { get; } + + private IssuedTokenProvider Provider { get; } + + public void Dispose() + { + if (this.OwnsCompletionSource) + { + this.CompletionSource?.Dispose(); + } + } + + public async Task GetTokenAsync(VssTraceActivity traceActivity) + { + IssuedToken token = null; + try + { + VssHttpEventSource.Log.IssuedTokenAcquiring(traceActivity, this.Provider); + if (this.Provider.InvokeRequired) + { + // Post to the UI thread using the scheduler. This may return a new task object which needs + // to be awaited, since once we get to the UI thread there may be nothing to do if someone else + // preempts us. + + // The cancellation token source is used to handle race conditions between scheduling and + // waiting for the UI task to begin execution. The callback is responsible for disposing of + // the token source, since the thought here is that the callback will run eventually as the + // typical reason for not starting execution within the timeout is due to a deadlock with + // the scheduler being used. + var timerTask = new TaskCompletionSource(); + var timeoutTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(3)); + timeoutTokenSource.Token.Register(() => timerTask.SetResult(null), false); + + var uiTask = Task.Factory.StartNew((state) => PostCallback(state, timeoutTokenSource), + this, + this.CancellationToken, + TaskCreationOptions.None, + this.Provider.Credential.Scheduler).Unwrap(); + + var completedTask = await Task.WhenAny(timerTask.Task, uiTask).ConfigureAwait(false); + if (completedTask == uiTask) + { + token = uiTask.Result; + } + } + else + { + token = await this.Provider.OnGetTokenAsync(this.FailedToken, this.CancellationToken).ConfigureAwait(false); + } + + CompletionSource.TrySetResult(token); + return token; + } + catch (Exception exception) + { + // Mark our completion source as failed so other waiters will get notified in all cases + CompletionSource.TrySetException(exception); + throw; + } + finally + { + this.Provider.CurrentToken = token ?? this.FailedToken; + VssHttpEventSource.Log.IssuedTokenAcquired(traceActivity, this.Provider, token); + } + } + + public async Task WaitForTokenAsync( + VssTraceActivity traceActivity, + CancellationToken cancellationToken) + { + IssuedToken token = null; + try + { + + VssHttpEventSource.Log.IssuedTokenWaitStart(traceActivity, this.Provider, this.ActivityId); + token = await Task.Factory.ContinueWhenAll(new Task[] { CompletionSource.Task }, (x) => CompletionSource.Task.Result, cancellationToken).ConfigureAwait(false); + } + finally + { + VssHttpEventSource.Log.IssuedTokenWaitStop(traceActivity, this.Provider, token); + } + + return token; + } + + private static Task PostCallback( + Object state, + CancellationTokenSource timeoutTokenSource) + { + // Make sure that we were not cancelled (timed out) before this callback is invoked. 
+ using (timeoutTokenSource) + { + timeoutTokenSource.CancelAfter(-1); + if (timeoutTokenSource.IsCancellationRequested) + { + return Task.FromResult(null); + } + } + + GetTokenOperation thisPtr = (GetTokenOperation)state; + return thisPtr.Provider.OnGetTokenAsync(thisPtr.FailedToken, thisPtr.CancellationToken); + } + } + } +} diff --git a/src/Sdk/Common/Common/Authentication/VssBasicCredential.cs b/src/Sdk/Common/Common/Authentication/VssBasicCredential.cs new file mode 100644 index 00000000000..eb984d19fee --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/VssBasicCredential.cs @@ -0,0 +1,92 @@ +using System; +using System.Linq; +using System.Net; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.Common +{ + /// + /// Provides a credential for basic authentication against a Visual Studio Service. + /// + public sealed class VssBasicCredential : FederatedCredential + { + /// + /// Initializes a new VssBasicCredential instance with no token specified. + /// + public VssBasicCredential() + : this((VssBasicToken)null) + { + } + + /// + /// Initializes a new VssBasicCredential instance with the specified user name and password. + /// + /// The user name + /// The password + public VssBasicCredential( + string userName, + string password) + : this(new VssBasicToken(new NetworkCredential(userName, password))) + { + } + + /// + /// Initializes a new VssBasicCredential instance with the specified token. + /// + /// An optional token which, if present, should be used before obtaining a new token + public VssBasicCredential(ICredentials initialToken) + : this(new VssBasicToken(initialToken)) + { + } + + /// + /// Initializes a new VssBasicCredential instance with the specified token. + /// + /// An optional token which, if present, should be used before obtaining a new token + public VssBasicCredential(VssBasicToken initialToken) + : base(initialToken) + { + } + + public override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.Basic; + } + } + + public override bool IsAuthenticationChallenge(IHttpResponse webResponse) + { + if (webResponse == null) + { + return false; + } + + if (webResponse.StatusCode != HttpStatusCode.Found && + webResponse.StatusCode != HttpStatusCode.Redirect && + webResponse.StatusCode != HttpStatusCode.Unauthorized) + { + return false; + } + + return webResponse.Headers.GetValues(HttpHeaders.WwwAuthenticate).Any(x => x.StartsWith("Basic", StringComparison.OrdinalIgnoreCase)); + } + + protected override IssuedTokenProvider OnCreateTokenProvider( + Uri serverUrl, + IHttpResponse response) + { + if (serverUrl.Scheme != "https") + { + String unsafeBasicAuthEnv = Environment.GetEnvironmentVariable("VSS_ALLOW_UNSAFE_BASICAUTH") ?? "false"; + if (!Boolean.TryParse(unsafeBasicAuthEnv, out Boolean unsafeBasicAuth) || !unsafeBasicAuth) + { + throw new InvalidOperationException(CommonResources.BasicAuthenticationRequiresSsl()); + } + } + + return new BasicAuthTokenProvider(this, serverUrl); + } + } +} diff --git a/src/Sdk/Common/Common/Authentication/VssBasicToken.cs b/src/Sdk/Common/Common/Authentication/VssBasicToken.cs new file mode 100644 index 00000000000..ce4142bf2cc --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/VssBasicToken.cs @@ -0,0 +1,63 @@ +using System; +using System.Globalization; +using System.Net; + +namespace GitHub.Services.Common +{ + /// + /// Provides a token for basic authentication of internet identities. 
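VssBasicCredential.OnCreateTokenProvider above refuses to send Basic credentials over plain HTTP unless the VSS_ALLOW_UNSAFE_BASICAUTH environment variable parses as true. A short sketch of that opt-in, meant only for a local, non-TLS test endpoint:

using System;

class UnsafeBasicAuthOptIn
{
    static void Main()
    {
        // Same variable name the check above reads; the value goes through Boolean.TryParse,
        // so casing does not matter. Leave it unset for any real deployment.
        Environment.SetEnvironmentVariable("VSS_ALLOW_UNSAFE_BASICAUTH", "true");

        Console.WriteLine(Environment.GetEnvironmentVariable("VSS_ALLOW_UNSAFE_BASICAUTH"));
    }
}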
+ /// + public sealed class VssBasicToken : IssuedToken + { + /// + /// Initializes a new BasicAuthToken instance with the specified token value. + /// + /// The credentials which should be used for authentication + public VssBasicToken(ICredentials credentials) + { + m_credentials = credentials; + } + + internal ICredentials Credentials + { + get + { + return m_credentials; + } + } + + protected internal override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.Basic; + } + } + + internal override void ApplyTo(IHttpRequest request) + { + var basicCredential = m_credentials.GetCredential(request.RequestUri, "Basic"); + if (basicCredential != null) + { + request.Headers.SetValue(Internal.HttpHeaders.Authorization, "Basic " + FormatBasicAuthHeader(basicCredential)); + } + } + + private static String FormatBasicAuthHeader(NetworkCredential credential) + { + String authHeader = String.Empty; + if (!String.IsNullOrEmpty(credential.Domain)) + { + authHeader = String.Format(CultureInfo.InvariantCulture, "{0}\\{1}:{2}", credential.Domain, credential.UserName, credential.Password); + } + else + { + authHeader = String.Format(CultureInfo.InvariantCulture, "{0}:{1}", credential.UserName, credential.Password); + } + + return Convert.ToBase64String(VssHttpRequestSettings.Encoding.GetBytes(authHeader)); + } + + private readonly ICredentials m_credentials; + } +} diff --git a/src/Sdk/Common/Common/Authentication/VssBasicTokenProvider.cs b/src/Sdk/Common/Common/Authentication/VssBasicTokenProvider.cs new file mode 100644 index 00000000000..051699965ce --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/VssBasicTokenProvider.cs @@ -0,0 +1,39 @@ +using System; +using System.Net; + +namespace GitHub.Services.Common +{ + internal sealed class BasicAuthTokenProvider : IssuedTokenProvider + { + public BasicAuthTokenProvider( + VssBasicCredential credential, + Uri serverUrl) + : base(credential, serverUrl, serverUrl) + { + } + + protected override String AuthenticationScheme + { + get + { + return "Basic"; + } + } + + public new VssBasicCredential Credential + { + get + { + return (VssBasicCredential)base.Credential; + } + } + + public override Boolean GetTokenIsInteractive + { + get + { + return base.CurrentToken == null; + } + } + } +} diff --git a/src/Sdk/Common/Common/Authentication/VssCredentials.cs b/src/Sdk/Common/Common/Authentication/VssCredentials.cs new file mode 100644 index 00000000000..92ffd5e5a08 --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/VssCredentials.cs @@ -0,0 +1,611 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common.Diagnostics; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.Common +{ + /// + /// The type of credentials supported natively by the framework + /// + public enum VssCredentialsType + { + Windows = 0, + Federated = 1, + Basic = 2, + ServiceIdentity = 3, + OAuth = 4, + S2S = 5, + Other = 6, + Aad = 7, + } + + /// + /// Provides the ability to control when to show or hide the credential prompt user interface. + /// + public enum CredentialPromptType + { + /// + /// Show the UI only if necessary to obtain credentials. + /// + PromptIfNeeded = 0, + + /// + /// Never show the UI, even if an error occurs. + /// + DoNotPrompt = 2, + } + + /// + /// Provides credentials to use when connecting to a Visual Studio Service. 
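A self-contained sketch of the Authorization header value VssBasicToken.ApplyTo writes above. Two details are assumptions: the empty-user-name convention for personal access tokens, and UTF-8 as the byte encoding (the original uses VssHttpRequestSettings.Encoding, which is not shown here).

using System;
using System.Text;

class BasicHeaderSketch
{
    // Mirrors FormatBasicAuthHeader: "domain\user:password" when a domain is present,
    // otherwise "user:password", then Base64.
    static string Format(string userName, string password, string domain = "")
    {
        string raw = string.IsNullOrEmpty(domain)
            ? $"{userName}:{password}"
            : $"{domain}\\{userName}:{password}";
        return Convert.ToBase64String(Encoding.UTF8.GetBytes(raw));
    }

    static void Main()
    {
        Console.WriteLine("Basic " + Format(string.Empty, "placeholder-pat"));
    }
}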
+ /// + public class VssCredentials + { + /// + /// Initializes a new VssCredentials instance with default credentials. + /// + public VssCredentials() + : this(true) + { + } + + /// + /// Initializes a new VssCredentials instance with default credentials if specified. + /// + /// True to use default windows credentials; otherwise, false + public VssCredentials(bool useDefaultCredentials) + : this(new WindowsCredential(useDefaultCredentials)) + { + } + + /// + /// Initializes a new VssCredentials instance with the specified windows credential. + /// + /// The windows credential to use for authentication + public VssCredentials(WindowsCredential windowsCredential) + : this(windowsCredential, null) + { + } + + /// + /// Initializes a new VssCredentials instance with the specified windows credential. + /// + /// The windows credential to use for authentication + /// CredentialPromptType.PromptIfNeeded if interactive prompts are allowed, otherwise CredentialProptType.DoNotPrompt + public VssCredentials( + WindowsCredential windowsCredential, + CredentialPromptType promptType) + : this(windowsCredential, null, promptType) + { + } + + /// + /// Initializes a new VssCredentials instance with the specified issued token credential and + /// default windows credential. + /// + /// The federated credential to use for authentication + public VssCredentials(FederatedCredential federatedCredential) + : this(new WindowsCredential(), federatedCredential) + { + } + + /// + /// Initializes a new VssCredentials instance with the specified windows and issued token + /// credential. + /// + /// The windows credential to use for authentication + /// The federated credential to use for authentication + public VssCredentials( + WindowsCredential windowsCredential, + FederatedCredential federatedCredential) + : this(windowsCredential, federatedCredential, EnvironmentUserInteractive + ? CredentialPromptType.PromptIfNeeded : CredentialPromptType.DoNotPrompt) + { + } + + /// + /// Initializes a new VssCredentials instance with the specified windows and issued token + /// credential. + /// + /// The windows credential to use for authentication + /// The federated credential to use for authentication + /// CredentialPromptType.PromptIfNeeded if interactive prompts are allowed, otherwise CredentialProptType.DoNotPrompt + public VssCredentials( + WindowsCredential windowsCredential, + FederatedCredential federatedCredential, + CredentialPromptType promptType) + : this(windowsCredential, federatedCredential, promptType, null) + { + } + + /// + /// Initializes a new VssCredentials instance with the specified windows and issued token + /// credential. + /// + /// The windows credential to use for authentication + /// The federated credential to use for authentication + /// CredentialPromptType.PromptIfNeeded if interactive prompts are allowed; otherwise, CredentialProptType.DoNotPrompt + /// An optional TaskScheduler to ensure credentials prompting occurs on the UI thread + public VssCredentials( + WindowsCredential windowsCredential, + FederatedCredential federatedCredential, + CredentialPromptType promptType, + TaskScheduler scheduler) + : this(windowsCredential, federatedCredential, promptType, scheduler, null) + { + } + + /// + /// Initializes a new VssCredentials instance with the specified windows and issued token + /// credential. 
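A short usage sketch for a runner-style setup where a single token authenticates the connection. The token value is a placeholder; the VssCredentials(FederatedCredential) constructor shown above supplies the default WindowsCredential half automatically.

using System;
using GitHub.Services.Common;

class CredentialSetupSketch
{
    static void Main()
    {
        // VssBasicCredential derives from FederatedCredential, so it fills the federated slot.
        var credentials = new VssCredentials(new VssBasicCredential(string.Empty, "placeholder-pat"));

        Console.WriteLine(credentials.Federated.CredentialType); // Basic
        Console.WriteLine(credentials.PromptType);
    }
}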
+ /// + /// The windows credential to use for authentication + /// The federated credential to use for authentication + /// CredentialPromptType.PromptIfNeeded if interactive prompts are allowed; otherwise, CredentialProptType.DoNotPrompt + /// An optional TaskScheduler to ensure credentials prompting occurs on the UI thread + /// An optional IVssCredentialPrompt to perform prompting for credentials + public VssCredentials( + WindowsCredential windowsCredential, + FederatedCredential federatedCredential, + CredentialPromptType promptType, + TaskScheduler scheduler, + IVssCredentialPrompt credentialPrompt) + { + this.PromptType = promptType; + + if (promptType == CredentialPromptType.PromptIfNeeded && scheduler == null) + { + // If we use TaskScheduler.FromCurrentSynchronizationContext() here and this is executing under the UI + // thread, for example from an event handler in a WinForms applications, this TaskScheduler will capture + // the UI SyncrhonizationContext whose MaximumConcurrencyLevel is 1 and only has a single thread to + // execute queued work. Then, if the UI thread invokes one of our synchronous methods that are just + // wrappers that block until the asynchronous overload returns, and if the async Task queues work to + // this TaskScheduler, like GitHub.Services.CommonGetTokenOperation.GetTokenAsync does, + // this will produce an immediate deadlock. It is a much safer choice to use TaskScheduler.Default here + // as it uses the .NET Framework ThreadPool to execute queued work. + scheduler = TaskScheduler.Default; + } + + if (windowsCredential != null) + { + m_windowsCredential = windowsCredential; + m_windowsCredential.Scheduler = scheduler; + m_windowsCredential.Prompt = credentialPrompt; + } + + if (federatedCredential != null) + { + m_federatedCredential = federatedCredential; + m_federatedCredential.Scheduler = scheduler; + m_federatedCredential.Prompt = credentialPrompt; + } + + m_thisLock = new object(); + } + + /// + /// Implicitly converts a FederatedCredential instance into a VssCredentials instance. + /// + /// The federated credential instance + /// A new VssCredentials instance which wraps the specified credential + public static implicit operator VssCredentials(FederatedCredential credential) + { + return new VssCredentials(credential); + } + + /// + /// Implicitly converts a WindowsCredential instance into a VssCredentials instance. + /// + /// The windows credential instance + /// A new VssCredentials instance which wraps the specified credential + public static implicit operator VssCredentials(WindowsCredential credential) + { + return new VssCredentials(credential); + } + + /// + /// Gets or sets a value indicating whether or not interactive prompts are allowed. + /// + public CredentialPromptType PromptType + { + get + { + return m_promptType; + } + set + { + if (value == CredentialPromptType.PromptIfNeeded && !EnvironmentUserInteractive) + { + throw new ArgumentException(CommonResources.CannotPromptIfNonInteractive(), "PromptType"); + } + + m_promptType = value; + } + } + + /// + /// Gets or sets a value indicating the issued token credentials to use for authentication with the server. + /// + public FederatedCredential Federated + { + get + { + return m_federatedCredential; + } + } + + /// + /// Gets the windows credential to use for NTLM authentication with the server. + /// + public WindowsCredential Windows + { + get + { + return m_windowsCredential; + } + } + + /// + /// A pluggable credential store. 
+ /// Simply assign a storage implementation to this property + /// and the VssCredentials will use it to store and retrieve tokens + /// during authentication. + /// + public IVssCredentialStorage Storage + { + get + { + return m_credentialStorage; + } + set + { + m_credentialStorage = value; + + if (m_windowsCredential != null) + { + m_windowsCredential.Storage = value; + } + + if (m_federatedCredential != null) + { + m_federatedCredential.Storage = value; + } + } + } + + /// + ///Attempts to find appropriate Access token for IDE user and add to prompt's parameter + /// Actual implementation in override. + /// + internal virtual bool TryGetValidAdalToken(IVssCredentialPrompt prompt) + { + return false; + } + + /// + /// Creates a token provider for the configured issued token credentials. + /// + /// The targeted server + /// The failed web response + /// The failed token + /// A provider for retrieving tokens for the configured credential + internal IssuedTokenProvider CreateTokenProvider( + Uri serverUrl, + IHttpResponse webResponse, + IssuedToken failedToken) + { + ArgumentUtility.CheckForNull(serverUrl, "serverUrl"); + + IssuedTokenProvider tokenProvider = null; + VssTraceActivity traceActivity = VssTraceActivity.Current; + lock (m_thisLock) + { + tokenProvider = m_currentProvider; + if (tokenProvider == null || !tokenProvider.IsAuthenticationChallenge(webResponse)) + { + // Prefer federated authentication over Windows authentication. + if (m_federatedCredential != null && m_federatedCredential.IsAuthenticationChallenge(webResponse)) + { + if (tokenProvider != null) + { + VssHttpEventSource.Log.IssuedTokenProviderRemoved(traceActivity, tokenProvider); + } + + // TODO: This needs to be refactored or renamed to be more generic ... + this.TryGetValidAdalToken(m_federatedCredential.Prompt); + + tokenProvider = m_federatedCredential.CreateTokenProvider(serverUrl, webResponse, failedToken); + + if (tokenProvider != null) + { + VssHttpEventSource.Log.IssuedTokenProviderCreated(traceActivity, tokenProvider); + } + } + else if (m_windowsCredential != null && m_windowsCredential.IsAuthenticationChallenge(webResponse)) + { + if (tokenProvider != null) + { + VssHttpEventSource.Log.IssuedTokenProviderRemoved(traceActivity, tokenProvider); + } + + tokenProvider = m_windowsCredential.CreateTokenProvider(serverUrl, webResponse, failedToken); + + if (tokenProvider != null) + { + VssHttpEventSource.Log.IssuedTokenProviderCreated(traceActivity, tokenProvider); + } + } + + m_currentProvider = tokenProvider; + } + + return tokenProvider; + } + } + + /// + /// Retrieves the token provider for the provided server URL if one has been created. + /// + /// The targeted server + /// Stores the active token provider, if one exists + /// True if a token provider was found, false otherwise + public bool TryGetTokenProvider( + Uri serverUrl, + out IssuedTokenProvider provider) + { + ArgumentUtility.CheckForNull(serverUrl, "serverUrl"); + + lock (m_thisLock) + { + // Ensure that we attempt to use the most appropriate authentication mechanism by default. 
+ if (m_currentProvider == null) + { + if (m_federatedCredential != null) + { + m_currentProvider = m_federatedCredential.CreateTokenProvider(serverUrl, null, null); + } + + if (m_currentProvider == null && m_windowsCredential != null) + { + m_currentProvider = m_windowsCredential.CreateTokenProvider(serverUrl, null, null); + } + + if (m_currentProvider != null) + { + VssHttpEventSource.Log.IssuedTokenProviderCreated(VssTraceActivity.Current, m_currentProvider); + } + } + + provider = m_currentProvider; + } + + return provider != null; + } + + /// + /// Determines if the web response is an authentication redirect for issued token providers. + /// + /// The web response + /// True if this is an token authentication redirect, false otherwise + internal bool IsAuthenticationChallenge(IHttpResponse webResponse) + { + if (webResponse == null) + { + return false; + } + + bool isChallenge = false; + if (m_windowsCredential != null) + { + isChallenge = m_windowsCredential.IsAuthenticationChallenge(webResponse); + } + + if (!isChallenge && m_federatedCredential != null) + { + isChallenge = m_federatedCredential.IsAuthenticationChallenge(webResponse); + } + + return isChallenge; + } + + internal void SignOut( + Uri serverUrl, + Uri serviceLocation, + string identityProvider) + { + // Remove the token in the storage and the current token provider. Note that we don't + // call InvalidateToken here because we want to remove the whole token not just its value + if ((m_currentProvider != null) && (m_currentProvider.CurrentToken != null)) + { + if (m_currentProvider.Credential.Storage != null && m_currentProvider.TokenStorageUrl != null) + { + m_currentProvider.Credential.Storage.RemoveToken(m_currentProvider.TokenStorageUrl, m_currentProvider.CurrentToken); + } + m_currentProvider.CurrentToken = null; + } + + // We need to make sure that the current provider actually supports the signout method + ISupportSignOut tokenProviderWithSignOut = m_currentProvider as ISupportSignOut; + if (tokenProviderWithSignOut == null) + { + return; + } + + // Replace the parameters from the service location + if (serviceLocation != null) + { + string serviceLocationUri = serviceLocation.AbsoluteUri; + serviceLocationUri = serviceLocationUri.Replace("{mode}", "SignOut"); + serviceLocationUri = serviceLocationUri.Replace("{redirectUrl}", serverUrl.AbsoluteUri); + serviceLocation = new Uri(serviceLocationUri); + } + + // Now actually signout of the token provider + tokenProviderWithSignOut.SignOut(serviceLocation, serverUrl, identityProvider); + } + +#if !NETSTANDARD + /// + /// Loads stored credentials for the specified server if found. If no credentials are found in the windows + /// credential store for the specified server and options then default credentials are returned. + /// + /// The server location + /// A value indicating whether or not an exact or partial match of the server is required + /// A credentials object populated with stored credentials for the server if found + public static VssCredentials LoadCachedCredentials( + Uri serverUrl, + bool requireExactMatch) + { + return LoadCachedCredentials(null, serverUrl, requireExactMatch); + } + + /// + /// Loads stored credentials for the specified server if found. If no credentials are found for the specified server and options then default credentials are returned. 
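The Storage property earlier in this class makes the token cache pluggable: credentials pull an initial token from it, store newly validated tokens, and drop invalidated ones. A hedged, in-memory sketch of such a store covering only the operations this file actually calls; the signatures are inferred from those call sites, and the class deliberately does not claim to implement the full IVssCredentialStorage interface, whose definition is not shown here.

using System;
using System.Collections.Concurrent;
using GitHub.Services.Common;

class InMemoryTokenStoreSketch
{
    // Keys on the storage URL only; a fuller store would also key on the credential type.
    private readonly ConcurrentDictionary<string, IssuedToken> _tokens =
        new ConcurrentDictionary<string, IssuedToken>(StringComparer.OrdinalIgnoreCase);

    public IssuedToken RetrieveToken(Uri tokenStorageUrl, VssCredentialsType credentialsType)
    {
        _tokens.TryGetValue(tokenStorageUrl.AbsoluteUri, out IssuedToken token);
        return token;
    }

    public void StoreToken(Uri tokenStorageUrl, IssuedToken token)
        => _tokens[tokenStorageUrl.AbsoluteUri] = token;

    public void RemoveToken(Uri tokenStorageUrl, IssuedToken token)
        => _tokens.TryRemove(tokenStorageUrl.AbsoluteUri, out _);

    public void RemoveTokenValue(Uri tokenStorageUrl, IssuedToken token)
        => RemoveToken(tokenStorageUrl, token);
}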
+ /// This overload assumes that the credentials are to be stored under the TFS server's registry root + /// + /// An optional application name for isolated credential storage in the registry + /// The server location + /// A value indicating whether or not an exact or partial match of the server is required + /// A credentials object populated with stored credentials for the server if found + [EditorBrowsable(EditorBrowsableState.Never)] + public static VssCredentials LoadCachedCredentials( + string featureRegistryKeyword, + Uri serverUrl, + bool requireExactMatch) + { + ArgumentUtility.CheckForNull(serverUrl, "serverUrl"); + + bool uriKnownToCachedProvider = false; + VssCredentials cred = LoadCachedCredentialsFromRegisteredProviders(serverUrl, out uriKnownToCachedProvider); + + // If one of the registered credential providers had the target URI in its cache but failed to return a valid credential it means + // we should have had a cred but something went wrong (user canceled, user failed, auth source unavailable, etc.). In that case + // we Do Not want to carry on with the fallback to the VS registry/windows store credential caches. Even if that worked to get a + // credential it would put the user in a bad state (having an active, authenticated connection with an unexpected credential type). + if (cred == null && !uriKnownToCachedProvider) + { + WindowsCredential windowsCredential = null; + FederatedCredential federatedCredential = null; + CredentialsCacheManager credentialsCacheManager = new CredentialsCacheManager(); + TfsCredentialCacheEntry cacheEntry = credentialsCacheManager.GetCredentials(featureRegistryKeyword, serverUrl, requireExactMatch, null); + if (cacheEntry != null) + { + if (cacheEntry.NonInteractive) + { + switch (cacheEntry.Type) + { + case CachedCredentialsType.ServiceIdentity: + VssServiceIdentityToken initialToken = null; + string initialTokenValue = ReadAuthorizationToken(cacheEntry.Attributes); + if (!string.IsNullOrEmpty(initialTokenValue)) + { + initialToken = new VssServiceIdentityToken(initialTokenValue); + } + + // Initialize the issued token credential using the stored token if it exists + federatedCredential = new VssServiceIdentityCredential(cacheEntry.Credentials.UserName, + cacheEntry.Credentials.Password, + initialToken); + break; + + case CachedCredentialsType.Windows: + windowsCredential = new WindowsCredential(cacheEntry.Credentials); + break; + } + } + } + + cred = new VssCredentials(windowsCredential ?? new WindowsCredential(true), federatedCredential, CredentialPromptType.DoNotPrompt); + } + + return cred ?? 
new VssCredentials(new WindowsCredential(true), null, CredentialPromptType.DoNotPrompt); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static VssCredentials LoadCachedCredentialsFromRegisteredProviders(Uri serverUri, out bool knownUri) + { + LoadRegisteredCachedVssCredentialProviders(); + bool uriKnownByAnyProvider = false; + VssCredentials cred = null; + foreach (var pair in m_loadedCachedVssCredentialProviders) + { + bool uriKnownToProvider = false; + cred = pair.Value?.GetCachedCredentials(serverUri, out uriKnownToProvider); + if (cred != null || uriKnownToProvider) + { + uriKnownByAnyProvider |= uriKnownToProvider; + break; + } + } + knownUri = uriKnownByAnyProvider; + return cred; + } + + private static void LoadRegisteredCachedVssCredentialProviders() + { + CredentialsProviderRegistryHelper.LoadCachedVssCredentialProviders(ref m_loadedCachedVssCredentialProviders); + } + private static ConcurrentDictionary m_loadedCachedVssCredentialProviders = new ConcurrentDictionary(); +#endif + + [EditorBrowsable(EditorBrowsableState.Never)] + public static void WriteAuthorizationToken( + string token, + IDictionary attributes) + { + int i = 0; + for (int j = 0; j < token.Length; i++, j += 128) + { + attributes["AuthTokenSegment" + i] = token.Substring(j, Math.Min(128, token.Length - j)); + } + + attributes["AuthTokenSegmentCount"] = i.ToString(CultureInfo.InvariantCulture); + } + + protected static string ReadAuthorizationToken(IDictionary attributes) + { + string authTokenCountValue; + if (attributes.TryGetValue("AuthTokenSegmentCount", out authTokenCountValue)) + { + int authTokenCount = int.Parse(authTokenCountValue, CultureInfo.InvariantCulture); + + StringBuilder sb = new StringBuilder(); + for (int i = 0; i < authTokenCount; i++) + { + string segmentName = "AuthTokenSegment" + i; + + string segmentValue; + if (attributes.TryGetValue(segmentName, out segmentValue)) + { + sb.Append(segmentValue); + } + } + + return sb.ToString(); + } + + return string.Empty; + } + + protected static bool EnvironmentUserInteractive + { + get + { + return Environment.UserInteractive; + } + } + + private object m_thisLock; + private CredentialPromptType m_promptType; + private IssuedTokenProvider m_currentProvider; + protected WindowsCredential m_windowsCredential; + protected FederatedCredential m_federatedCredential; + private IVssCredentialStorage m_credentialStorage; + } +} diff --git a/src/Sdk/Common/Common/Authentication/VssServiceIdentityCredential.cs b/src/Sdk/Common/Common/Authentication/VssServiceIdentityCredential.cs new file mode 100644 index 00000000000..8b22a5616d2 --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/VssServiceIdentityCredential.cs @@ -0,0 +1,164 @@ +using System; +using System.Globalization; +using System.Linq; +using System.Net; +using System.Net.Http; + +namespace GitHub.Services.Common +{ + /// + /// Provides federated authentication as a service identity with a Visual Studio Service. + /// + [Serializable] + public sealed class VssServiceIdentityCredential : FederatedCredential + { + /// + /// Initializes a new VssServiceIdentityCredential instance with the specified user name and password. + /// + /// The user name + /// The password + public VssServiceIdentityCredential( + string userName, + string password) + : this(userName, password, null) + { + } + + /// + /// Initializes a new VssServiceIdentityCredential instance with the specified user name and password. 
The + /// provided token, if not null, will be used before attempting authentication with the credentials. + /// + /// The user name + /// The password + /// An optional token which, if present, should be used before obtaining a new token + public VssServiceIdentityCredential( + string userName, + string password, + VssServiceIdentityToken initialToken) + : this(userName, password, initialToken, null) + { + } + + /// + /// Initializes a new VssServiceIdentityCredential instance with the specified access token. + /// + /// A token which may be used for authorization as the desired service identity + public VssServiceIdentityCredential(VssServiceIdentityToken token) + : this(null, null, token, null) + { + } + + /// + /// Initializes a new VssServiceIdentityCredential instance with the specified user name and password. The + /// provided token, if not null, will be used before attempting authentication with the credentials. + /// + /// The user name + /// The password + /// An optional token which, if present, should be used before obtaining a new token + /// An optional HttpMessageHandler which if passed will be passed along to the TokenProvider when executing OnCreateTokenProvider + public VssServiceIdentityCredential( + string userName, + string password, + VssServiceIdentityToken initialToken, + DelegatingHandler innerHandler) + : base(initialToken) + { + m_userName = userName; + m_password = password; + m_innerHandler = innerHandler; + } + + public override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.ServiceIdentity; + } + } + + /// + /// Gets the user name. + /// + public String UserName + { + get + { + return m_userName; + } + } + + /// + /// Gets the password. + /// + internal String Password + { + get + { + return m_password; + } + } + + public override bool IsAuthenticationChallenge(IHttpResponse webResponse) + { + if (webResponse == null) + { + return false; + } + + if (webResponse.StatusCode == HttpStatusCode.Found || + webResponse.StatusCode == HttpStatusCode.Redirect || + webResponse.StatusCode == HttpStatusCode.Unauthorized) + { + var authRealm = webResponse.Headers.GetValues(Internal.HttpHeaders.TfsFedAuthRealm).FirstOrDefault(); + var authIssuer = webResponse.Headers.GetValues(Internal.HttpHeaders.TfsFedAuthIssuer).FirstOrDefault(); + var wwwAuthenticate = webResponse.Headers.GetValues(Internal.HttpHeaders.WwwAuthenticate); + if (!String.IsNullOrEmpty(authIssuer) && !String.IsNullOrEmpty(authRealm)) + { + return webResponse.StatusCode != HttpStatusCode.Unauthorized || wwwAuthenticate.Any(x => x.StartsWith("TFS-Federated", StringComparison.OrdinalIgnoreCase)); + } + } + + return false; + } + + internal override string GetAuthenticationChallenge(IHttpResponse webResponse) + { + var authRealm = webResponse.Headers.GetValues(Internal.HttpHeaders.TfsFedAuthRealm).FirstOrDefault(); + var authIssuer = webResponse.Headers.GetValues(Internal.HttpHeaders.TfsFedAuthIssuer).FirstOrDefault(); + return string.Format(CultureInfo.InvariantCulture, "TFS-Federated realm={0}, issuer={1}", authRealm, authIssuer); + } + + /// + /// Creates a provider for retrieving security tokens for the provided credentials. + /// + /// An issued token provider for the current credential + protected override IssuedTokenProvider OnCreateTokenProvider( + Uri serverUrl, + IHttpResponse response) + { + // The response is only null when attempting to determine the most appropriate token provider to + // use for the connection. 
The only way we should do anything here is if we have an initial token + // since that means we can present something without making a server call. + if (response == null && base.InitialToken == null) + { + return null; + } + + Uri signInUrl = null; + String realm = string.Empty; + if (response != null) + { + realm = response.Headers.GetValues(Internal.HttpHeaders.TfsFedAuthRealm).FirstOrDefault(); + signInUrl = new Uri(new Uri(response.Headers.GetValues(Internal.HttpHeaders.TfsFedAuthIssuer).FirstOrDefault()).GetLeftPart(UriPartial.Authority)); + } + + return new VssServiceIdentityTokenProvider(this, serverUrl, signInUrl, realm, m_innerHandler); + } + + private readonly String m_userName; + private readonly String m_password; + + [NonSerialized] + private readonly DelegatingHandler m_innerHandler = null; + } +} diff --git a/src/Sdk/Common/Common/Authentication/VssServiceIdentityToken.cs b/src/Sdk/Common/Common/Authentication/VssServiceIdentityToken.cs new file mode 100644 index 00000000000..8d102d06b7d --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/VssServiceIdentityToken.cs @@ -0,0 +1,114 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.Common +{ + /// + /// Provides simple web token used for OAuth authentication. + /// + [Serializable] + public sealed class VssServiceIdentityToken : IssuedToken + { + /// + /// Initializes a new VssServiceIdentityToken instance with the specified token value. + /// + /// The token value as a string + public VssServiceIdentityToken(string token) + { + ArgumentUtility.CheckStringForNullOrEmpty(token, "token"); + + m_token = token; + //.ValidFrom = DateTime.UtcNow; + + // Read out the expiration time for the ValidTo field if we can find it + Dictionary nameValues; + if (TryGetNameValues(token, out nameValues)) + { + string expiresOnValue; + if (nameValues.TryGetValue(c_expiresName, out expiresOnValue)) + { + // The time is represented as standard epoch + // base.ValidTo = s_epoch.AddSeconds(Convert.ToUInt64(expiresOnValue, CultureInfo.CurrentCulture)); + } + } + } + + public String Token + { + get + { + return m_token; + } + } + + protected internal override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.ServiceIdentity; + } + } + + internal override void ApplyTo(IHttpRequest request) + { + request.Headers.SetValue(Internal.HttpHeaders.Authorization, "WRAP access_token=\"" + m_token + "\""); + } + + internal static VssServiceIdentityToken ExtractToken(string responseValue) + { + // Extract the actual token string + string token = UriUtility.UrlDecode(responseValue + .Split('&') + .Single(value => value.StartsWith("wrap_access_token=", StringComparison.OrdinalIgnoreCase)) + .Split('=')[1], VssHttpRequestSettings.Encoding); + + return new VssServiceIdentityToken(token); + } + + internal static bool TryGetNameValues( + string token, + out Dictionary tokenValues) + { + tokenValues = null; + + if (string.IsNullOrEmpty(token)) + { + return false; + } + + tokenValues = + token + .Split('&') + .Aggregate( + new Dictionary(), + (dict, rawNameValue) => + { + if (rawNameValue == string.Empty) + { + return dict; + } + + string[] nameValue = rawNameValue.Split('='); + + if (nameValue.Length != 2) + { + return dict; + } + + if (dict.ContainsKey(nameValue[0]) == true) + { + return dict; + } + + dict.Add(UriUtility.UrlDecode(nameValue[0]), UriUtility.UrlDecode(nameValue[1])); + return dict; + }); + return true; + } + + private string m_token; 
+ private const string c_expiresName = "ExpiresOn"; + } +} diff --git a/src/Sdk/Common/Common/Authentication/VssServiceIdentityTokenProvider.cs b/src/Sdk/Common/Common/Authentication/VssServiceIdentityTokenProvider.cs new file mode 100644 index 00000000000..2748afdc7a2 --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/VssServiceIdentityTokenProvider.cs @@ -0,0 +1,201 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common.Diagnostics; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.Common +{ + internal sealed class VssServiceIdentityTokenProvider : IssuedTokenProvider + { + public VssServiceIdentityTokenProvider( + VssServiceIdentityCredential credential, + Uri serverUrl, + Uri signInUrl, + string realm, + DelegatingHandler innerHandler) + : this(credential, serverUrl, signInUrl, realm) + { + m_innerHandler = innerHandler; + } + + public VssServiceIdentityTokenProvider( + VssServiceIdentityCredential credential, + Uri serverUrl, + Uri signInUrl, + string realm) + : base(credential, serverUrl, signInUrl) + { + Realm = realm; + } + + protected override string AuthenticationParameter + { + get + { + if (string.IsNullOrEmpty(this.Realm) && this.SignInUrl == null) + { + return string.Empty; + } + else + { + return string.Format(CultureInfo.InvariantCulture, "issuer=\"{0}\", realm=\"{1}\"", this.SignInUrl, this.Realm); + } + } + } + + protected override String AuthenticationScheme + { + get + { + return "TFS-Federated"; + } + } + + /// + /// Gets the simple web token credential from which this provider was created. + /// + public new VssServiceIdentityCredential Credential + { + get + { + return (VssServiceIdentityCredential)base.Credential; + } + } + + /// + /// Gets a value indicating whether or not a call to get token will require interactivity. + /// + public override Boolean GetTokenIsInteractive + { + get + { + return false; + } + } + + /// + /// Gets the realm for the token provider. + /// + public String Realm + { + get; + } + + protected internal override bool IsAuthenticationChallenge(IHttpResponse webResponse) + { + if (!base.IsAuthenticationChallenge(webResponse)) + { + return false; + } + + // This means we were proactively constructed without any connection information. In this case + // we return false to ensure that a new provider is reconstructed with all appropriate configuration + // to retrieve a new token. + if (this.SignInUrl == null) + { + return false; + } + + string authRealm = webResponse.Headers.GetValues(HttpHeaders.TfsFedAuthRealm).FirstOrDefault(); + string authIssuer = webResponse.Headers.GetValues(HttpHeaders.TfsFedAuthIssuer).FirstOrDefault(); + Uri signInUrl = new Uri(new Uri(authIssuer).GetLeftPart(UriPartial.Authority), UriKind.Absolute); + + // Make sure that the values match our stored values. This way if the values change we will be thrown + // away and a new instance with correct values will be constructed. + return this.Realm.Equals(authRealm, StringComparison.OrdinalIgnoreCase) && + Uri.Compare(this.SignInUrl, signInUrl, UriComponents.AbsoluteUri, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) == 0; + } + + /// + /// Issues a request to synchronously retrieve a token for the associated credential. 
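IsAuthenticationChallenge just above re-validates a challenge by reducing the issuer URL from the response headers to its authority and comparing it, case-insensitively, against the provider's stored sign-in URL. A standalone sketch of that comparison with placeholder URLs:

using System;

class IssuerComparisonSketch
{
    static void Main()
    {
        Uri signInUrl = new Uri("https://auth.example.test/");

        // The challenge carries a full issuer URL; only its authority is compared.
        Uri challengedIssuer = new Uri(
            new Uri("https://AUTH.EXAMPLE.TEST/issue/wrapv0.9").GetLeftPart(UriPartial.Authority),
            UriKind.Absolute);

        bool sameIssuer = Uri.Compare(
            signInUrl,
            challengedIssuer,
            UriComponents.AbsoluteUri,
            UriFormat.Unescaped,
            StringComparison.OrdinalIgnoreCase) == 0;

        Console.WriteLine(sameIssuer); // True
    }
}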
+ /// + /// + /// + /// + protected override async Task OnGetTokenAsync( + IssuedToken failedToken, + CancellationToken cancellationToken) + { + if (string.IsNullOrEmpty(this.Credential.UserName) || + string.IsNullOrEmpty(this.Credential.Password)) + { + return null; + } + + VssTraceActivity traceActivity = VssTraceActivity.Current; + using (HttpClient client = new HttpClient(CreateMessageHandler(), false)) + { + client.BaseAddress = this.SignInUrl; + + KeyValuePair[] values = new KeyValuePair[] + { + new KeyValuePair("wrap_name", this.Credential.UserName), + new KeyValuePair("wrap_password", this.Credential.Password), + new KeyValuePair("wrap_scope", this.Realm), + }; + + Uri url = new Uri("WRAPv0.9/", UriKind.Relative); + FormUrlEncodedContent content = new FormUrlEncodedContent(values); + using (HttpResponseMessage response = await client.PostAsync(url, content, cancellationToken).ConfigureAwait(false)) + { + string responseValue = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + if (response.IsSuccessStatusCode) + { + return VssServiceIdentityToken.ExtractToken(responseValue); + } + else + { + VssHttpEventSource.Log.AuthenticationError(traceActivity, this, responseValue); + return null; + } + } + } + } + + private HttpMessageHandler CreateMessageHandler() + { + var retryOptions = new VssHttpRetryOptions() + { + RetryableStatusCodes = + { + VssNetworkHelper.TooManyRequests, + HttpStatusCode.InternalServerError, + }, + }; + + HttpMessageHandler innerHandler; + + if (m_innerHandler != null) + { + if (m_innerHandler.InnerHandler == null) + { + m_innerHandler.InnerHandler = new HttpClientHandler(); + } + + innerHandler = m_innerHandler; + } + else + { + innerHandler = new HttpClientHandler(); + } + + // Inherit proxy setting from VssHttpMessageHandler + var httpClientHandler = innerHandler as HttpClientHandler; + if (httpClientHandler != null && VssHttpMessageHandler.DefaultWebProxy != null) + { + httpClientHandler.Proxy = VssHttpMessageHandler.DefaultWebProxy; + httpClientHandler.UseProxy = true; + } + + return new VssHttpRetryMessageHandler(retryOptions, innerHandler); + } + + private DelegatingHandler m_innerHandler = null; + } +} diff --git a/src/Sdk/Common/Common/Authentication/WindowsCredential.cs b/src/Sdk/Common/Common/Authentication/WindowsCredential.cs new file mode 100644 index 00000000000..e42bec581d8 --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/WindowsCredential.cs @@ -0,0 +1,137 @@ +using System; +using System.Linq; +using System.Net; + +namespace GitHub.Services.Common +{ + /// + /// Provides a credential for windows authentication against a Visual Studio Service. + /// + public sealed class WindowsCredential : IssuedTokenCredential + { + /// + /// Initializes a new WindowsCredential instance using a default user interface provider implementation + /// and the default network credentials. + /// + public WindowsCredential() + : this(true) + { + } + + /// + /// Initializes a new WindowsCredential instance using a default user interface provider implementation + /// and the default network credentials, if specified. + /// + /// True if the default credentials should be used; otherwise, false + public WindowsCredential(bool useDefaultCredentials) + : this(useDefaultCredentials ? CredentialCache.DefaultCredentials : null) + { + UseDefaultCredentials = useDefaultCredentials; + } + + /// + /// Initializes a new WindowsCredential instance using a default user interface provider implementation + /// and the specified network credentials. 
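A hedged, offline sketch of the WRAP v0.9 exchange that OnGetTokenAsync in VssServiceIdentityTokenProvider drives: the request body carries wrap_name, wrap_password and wrap_scope, and a successful response is itself form-encoded, with the access token read from the wrap_access_token pair the same way VssServiceIdentityToken.ExtractToken does. All values are placeholders, no request is actually sent, and WebUtility.UrlDecode stands in for the SDK's UriUtility helper.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading.Tasks;

class WrapExchangeSketch
{
    static async Task Main()
    {
        var values = new[]
        {
            new KeyValuePair<string, string>("wrap_name", "service-account"),
            new KeyValuePair<string, string>("wrap_password", "placeholder-secret"),
            new KeyValuePair<string, string>("wrap_scope", "https://example.test/realm"),
        };

        using var content = new FormUrlEncodedContent(values);
        Console.WriteLine(await content.ReadAsStringAsync());

        // The token is URL-decoded out of the wrap_access_token name/value pair.
        string responseBody = "wrap_access_token=abc%3D%3D&wrap_access_token_expires_in=900";
        string token = WebUtility.UrlDecode(
            responseBody.Split('&')
                .Single(v => v.StartsWith("wrap_access_token=", StringComparison.OrdinalIgnoreCase))
                .Split('=')[1]);
        Console.WriteLine(token); // abc==
    }
}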
+ /// + /// The windows credentials which should be used for authentication + public WindowsCredential(ICredentials credentials) + : this(null) + { + m_credentials = credentials; + UseDefaultCredentials = credentials == CredentialCache.DefaultCredentials; + } + + /// + /// Initializes a new WindowsCredential instance using the specified initial token. + /// + /// An optional token which, if present, should be used before obtaining a new token + public WindowsCredential(WindowsToken initialToken) + : base(initialToken) + { + } + + /// + /// Gets the credentials associated with this windows credential. + /// + public ICredentials Credentials + { + get + { + return m_credentials; + } + set + { + m_credentials = value; + UseDefaultCredentials = Credentials == CredentialCache.DefaultCredentials; + } + } + + public override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.Windows; + } + } + + /// + /// Gets a value indicating what value was passed to WindowsCredential(bool useDefaultCredentials) constructor + /// + public Boolean UseDefaultCredentials + { + get; + private set; + } + + public override Boolean IsAuthenticationChallenge(IHttpResponse webResponse) + { + if (webResponse == null) + { + return false; + } + + if (webResponse.StatusCode == HttpStatusCode.Unauthorized && + webResponse.Headers.GetValues(Internal.HttpHeaders.WwwAuthenticate).Any(x => AuthenticationSchemeValid(x))) + { + return true; + } + + if (webResponse.StatusCode == HttpStatusCode.ProxyAuthenticationRequired && + webResponse.Headers.GetValues(Internal.HttpHeaders.ProxyAuthenticate).Any(x => AuthenticationSchemeValid(x))) + { + return true; + } + + return false; + } + + protected override IssuedTokenProvider OnCreateTokenProvider( + Uri serverUrl, + IHttpResponse response) + { + // If we have no idea what kind of credentials we are supposed to be using, don't play a windows token on + // the first request. 
+ if (response == null) + { + return null; + } + + if (m_credentials != null) + { + this.InitialToken = new WindowsToken(m_credentials); + } + + return new WindowsTokenProvider(this, serverUrl); + } + + private static Boolean AuthenticationSchemeValid(String authenticateHeader) + { + return authenticateHeader.StartsWith("Basic", StringComparison.OrdinalIgnoreCase) || + authenticateHeader.StartsWith("Digest", StringComparison.OrdinalIgnoreCase) || + authenticateHeader.StartsWith("Negotiate", StringComparison.OrdinalIgnoreCase) || + authenticateHeader.StartsWith("Ntlm", StringComparison.OrdinalIgnoreCase); + } + + private ICredentials m_credentials; + } +} diff --git a/src/Sdk/Common/Common/Authentication/WindowsToken.cs b/src/Sdk/Common/Common/Authentication/WindowsToken.cs new file mode 100644 index 00000000000..70d79eba338 --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/WindowsToken.cs @@ -0,0 +1,39 @@ +using System; +using System.Net; + +namespace GitHub.Services.Common +{ + public sealed class WindowsToken : IssuedToken, ICredentials + { + internal WindowsToken(ICredentials credentials) + { + this.Credentials = credentials; + } + + public ICredentials Credentials + { + get; + } + + protected internal override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.Windows; + } + } + + internal override void ApplyTo(IHttpRequest request) + { + // Special-cased by the caller because we implement ICredentials + throw new InvalidOperationException(); + } + + NetworkCredential ICredentials.GetCredential( + Uri uri, + String authType) + { + return this.Credentials?.GetCredential(uri, authType); + } + } +} diff --git a/src/Sdk/Common/Common/Authentication/WindowsTokenProvider.cs b/src/Sdk/Common/Common/Authentication/WindowsTokenProvider.cs new file mode 100644 index 00000000000..4725de22b14 --- /dev/null +++ b/src/Sdk/Common/Common/Authentication/WindowsTokenProvider.cs @@ -0,0 +1,40 @@ +using System; +using System.Globalization; +using System.Net; + +namespace GitHub.Services.Common +{ + internal sealed class WindowsTokenProvider : IssuedTokenProvider + { + public WindowsTokenProvider( + WindowsCredential credential, + Uri serverUrl) + : base(credential, serverUrl, serverUrl) + { + } + + protected override String AuthenticationScheme + { + get + { + return String.Format(CultureInfo.InvariantCulture, "{0} {1} {2} {3}", AuthenticationSchemes.Negotiate, AuthenticationSchemes.Ntlm, AuthenticationSchemes.Digest, AuthenticationSchemes.Basic); + } + } + + public new WindowsCredential Credential + { + get + { + return (WindowsCredential)base.Credential; + } + } + + public override Boolean GetTokenIsInteractive + { + get + { + return base.CurrentToken == null; + } + } + } +} diff --git a/src/Sdk/Common/Common/ClientStorage/IVssClientStorage.cs b/src/Sdk/Common/Common/ClientStorage/IVssClientStorage.cs new file mode 100644 index 00000000000..d3529b9102b --- /dev/null +++ b/src/Sdk/Common/Common/ClientStorage/IVssClientStorage.cs @@ -0,0 +1,80 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.Services.Common.ClientStorage +{ + /// + /// An interface for accessing client data stored locally. + /// + [EditorBrowsable(EditorBrowsableState.Never)] // for internal use + public interface IVssClientStorage : IVssClientStorageReader, IVssClientStorageWriter + { + /// + /// Much like the System.IO.Path.Combine method, this method puts together path segments into a path using + /// the appropriate path delimiter. 
+        ///
+        ///
+        string PathKeyCombine(params string[] paths);
+
+        ///
+        /// The path segment delimiter used by this storage mechanism.
+        ///
+        char PathSeparator { get; }
+    }
+
+    ///
+    /// An interface for reading from local data storage
+    ///
+    public interface IVssClientStorageReader
+    {
+        ///
+        /// Reads one entry from the storage.
+        ///
+        /// The type to return.
+        /// This is the path key for the data to retrieve.
+        /// Returns the value stored at the given path as type T
+        T ReadEntry<T>(string path);
+
+        ///
+        /// Reads one entry from the storage. If the entry does not exist or can not be converted to type T, the default value provided will be returned.
+        /// When T is not a simple type, and there is extra logic to determine the default value, the pattern: ReadEntry<T>(path) && GetDefault(); is
+        /// preferred, so that the method to retrieve the default is not evaluated unless the entry does not exist.
+        ///
+        /// The type to return.
+        /// This is the path key for the data to retrieve.
+        /// The value to return if the key does not exist or the value can not be converted to type T
+        ///
+        T ReadEntry<T>(string path, T defaultValue);
+
+        ///
+        /// Returns all entries under the path provided whose values can be converted to T. If path = "root\mydata", then this will return all entries where path begins with "root\mydata\".
+        ///
+        /// The type for the entries to return.
+        /// The path pointing to the branch of entries to return.
+        ///
+        IDictionary<string, T> ReadEntries<T>(string path);
+    }
+
+    ///
+    /// An interface for writing to local data storage
+    ///
+    public interface IVssClientStorageWriter
+    {
+        ///
+        /// Writes one entry into the local data storage.
+        ///
+        /// This is the key for the data to store. Providing a path allows data to be accessed hierarchically.
+        /// The value to store at the specified path. Setting this to NULL will remove the entry.
+        void WriteEntry(string path, object value);
+
+        ///
+        /// Writes a set of entries to the writer, which provides efficiency benefits over writing each entry individually.
+        /// It also ensures that either all of the entries are written or, in the case of an error, no entries are written.
+        /// Setting a value to NULL will remove the entry.
+        ///
+        ///
+        void WriteEntries(IEnumerable<KeyValuePair<string, object>> entries);
+    }
+}
diff --git a/src/Sdk/Common/Common/ClientStorage/VssFileStorage.cs b/src/Sdk/Common/Common/ClientStorage/VssFileStorage.cs
new file mode 100644
index 00000000000..95d6dc1cfea
--- /dev/null
+++ b/src/Sdk/Common/Common/ClientStorage/VssFileStorage.cs
@@ -0,0 +1,623 @@
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Diagnostics;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+using GitHub.Services.Common.Internal;
+using Newtonsoft.Json;
+using Newtonsoft.Json.Linq;
+
+namespace GitHub.Services.Common.ClientStorage
+{
+    ///
+    /// Class providing access to local file storage, so data can persist across processes.
+    ///
+    [EditorBrowsable(EditorBrowsableState.Never)] // for internal use
+    public class VssFileStorage : IVssClientStorage, IDisposable
+    {
+        private readonly string m_filePath;
+        private readonly VssFileStorageReader m_reader;
+        private readonly IVssClientStorageWriter m_writer;
+
+        private const char c_defaultPathSeparator = '\\';
+        private const bool c_defaultIgnoreCaseInPaths = false;
+
+        ///
+        /// The separator to use between the path segments of the storage keys.
+ /// + public char PathSeparator { get; } + + /// + /// The StringComparer used to compare keys in the dictionary. + /// + public StringComparer PathComparer { get; } + + /// + /// This constructor should remain private. Use the factory method GetVssLocalFileStorage to ensure we only have one instance per file, + /// which will reduce contention. + /// + /// This file path to store the settings. + /// The separator to use between the path segments of the storage keys. + /// If true the dictionary will use the OrdinalIgnoreCase StringComparer to compare keys. + private VssFileStorage(string filePath, char pathSeparatorForKeys = c_defaultPathSeparator, bool ignoreCaseInPaths = c_defaultIgnoreCaseInPaths) // This constructor should remain private. + { + PathSeparator = pathSeparatorForKeys; + PathComparer = GetAppropriateStringComparer(ignoreCaseInPaths); + m_filePath = filePath; + m_reader = new VssFileStorageReader(m_filePath, pathSeparatorForKeys, PathComparer); + m_writer = new VssFileStorageWriter(m_filePath, pathSeparatorForKeys, PathComparer); + } + + public T ReadEntry(string path) + { + return m_reader.ReadEntry(path); + } + + public T ReadEntry(string path, T defaultValue) + { + return m_reader.ReadEntry(path, defaultValue); + } + + public IDictionary ReadEntries(string pathPrefix) + { + return m_reader.ReadEntries(pathPrefix); + } + + public void WriteEntries(IEnumerable> entries) + { + m_writer.WriteEntries(entries); + m_reader.NotifyChanged(); + } + + public void WriteEntry(string key, object value) + { + m_writer.WriteEntry(key, value); + m_reader.NotifyChanged(); + } + + public void Dispose() + { + m_reader.Dispose(); + } + + public string PathKeyCombine(params string[] paths) + { + StringBuilder combinedPath = new StringBuilder(); + foreach(string segment in paths) + { + if (segment != null) + { + string trimmedSegment = segment.TrimEnd(PathSeparator); + if (trimmedSegment.Length > 0) + { + if (combinedPath.Length > 0) + { + combinedPath.Append(PathSeparator); + } + combinedPath.Append(trimmedSegment); + } + } + } + return combinedPath.ToString(); + } + + private static ConcurrentDictionary s_storages = new ConcurrentDictionary(StringComparer.OrdinalIgnoreCase); + + /// + /// Factory method to get a VssFileStorage instance ensuring that we don't have two instances for the same file. + /// + /// The full path to the storage file. Ensure that the path used is in an appropriately secure location for the data you are storing. + /// The separator to use between the path segments of the storage keys. + /// If true the dictionary will use the OrdinalIgnoreCase StringComparer to compare keys. 
+ /// + public static IVssClientStorage GetVssLocalFileStorage(string fullPath, char pathSeparatorForKeys = c_defaultPathSeparator, bool ignoreCaseInPaths = c_defaultIgnoreCaseInPaths) + { + string normalizedFullPath = Path.GetFullPath(fullPath); + VssFileStorage storage = s_storages.GetOrAdd(normalizedFullPath, (key) => new VssFileStorage(key, pathSeparatorForKeys, ignoreCaseInPaths)); + + // we need to throw on mismatch if the cache contains a conflicting instance + if (storage.PathSeparator != pathSeparatorForKeys) + { + throw new ArgumentException(CommonResources.ConflictingPathSeparatorForVssFileStorage(pathSeparatorForKeys, normalizedFullPath, storage.PathSeparator)); + } + + StringComparer pathComparer = GetAppropriateStringComparer(ignoreCaseInPaths); + { + if (storage.PathComparer != pathComparer) + { + string caseSensitive = "Ordinal"; + string caseInsensitive = "OrdinalIgnoreCase"; + string requested = ignoreCaseInPaths ? caseInsensitive : caseSensitive; + string previous = ignoreCaseInPaths ? caseSensitive : caseInsensitive; + throw new ArgumentException(CommonResources.ConflictingStringComparerForVssFileStorage(requested, normalizedFullPath, previous)); + } + } + +#if DEBUG + Debug.Assert(fullPath.Equals(storage.m_filePath), string.Format("The same storage file is being referenced with different casing. This will cause issues when running in cross patform environments where the file system may be case sensitive. {0} != {1}", storage.m_filePath, normalizedFullPath)); +#endif + return storage; + } + + private static StringComparer GetAppropriateStringComparer(bool ignoreCase) + { + return ignoreCase ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal; + } + + /// + /// Gets an instance of a VssLocalFileStorage under the current user directory. + /// + /// This pathSuffix will be combined at the end of the current user data directory for VSS to make a full path. Something like: "%localappdata%\Microsoft\VisualStudio Services\[pathSuffix]" + /// Adds the current product version as a path segment. ...\Microsoft\VisualStudio Services\v[GeneratedVersionInfo.ProductVersion]\[pathSuffix]" + /// The separator to use between the path segments of the storage keys. + /// If true the dictionary will use the OrdinalIgnoreCase StringComparer to compare keys. + /// + public static IVssClientStorage GetCurrentUserVssFileStorage(string pathSuffix, bool storeByVssVersion, char pathSeparatorForKeys = c_defaultPathSeparator, bool ignoreCaseInPaths = c_defaultIgnoreCaseInPaths) + { + return GetVssLocalFileStorage(Path.Combine(storeByVssVersion ? ClientSettingsDirectoryByVersion : ClientSettingsDirectory, pathSuffix), pathSeparatorForKeys, ignoreCaseInPaths); + } + + /// + /// Directory containing the client settings files. + /// + /// This will look something like this: + /// C:\Users\[user]\AppData\Local\Microsoft\VisualStudio Services\v[GeneratedVersionInfo.ProductVersion] + /// + internal static string ClientSettingsDirectoryByVersion + { + get + { + // We purposely do not cache this value. This value needs to change if + // Windows Impersonation is being used. + return Path.Combine(ClientSettingsDirectory, "v" + GeneratedVersionInfo.ProductVersion); + } + } + + /// + /// Directory containing the client settings files. + /// + /// This will look something like this: + /// C:\Users\[user]\AppData\Local\Microsoft\VisualStudio Services + /// + internal static string ClientSettingsDirectory + { + get + { + // We purposely do not cache this value. 
This value needs to change if + // Windows Impersonation is being used. + + // Check to see if we can find the user's local application data directory. + string subDir = "Microsoft\\VisualStudio Services"; + string path = Environment.GetEnvironmentVariable("localappdata"); + SafeGetFolderPath(Environment.SpecialFolder.LocalApplicationData); + if (string.IsNullOrEmpty(path)) + { + // If the user has never logged onto this box they will not have a local application data directory. + // Check to see if they have a roaming network directory that moves with them. + path = SafeGetFolderPath(Environment.SpecialFolder.ApplicationData); + if (string.IsNullOrEmpty(path)) + { + // The user does not have a roaming network directory either. Just place the cache in the + // common area. + // If we are using the common dir, we might not have access to create a folder under "Microsoft" + // so we just create a top level folder. + path = Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData); + subDir = "Microsoft VisualStudio Services"; + } + } + + Debug.Assert(path != null, "folder path cannot be null"); + return Path.Combine(path, subDir); + } + } + + /// + /// Gets folder path and returns null in case the special folder in question doesn't exist (useful when the user has never logged on, which makes + /// GetFolderPath throw) + /// + /// Folder to retrieve + /// Path if available, null othewise + private static string SafeGetFolderPath(Environment.SpecialFolder specialFolder) + { + try + { + return Environment.GetFolderPath(specialFolder); + } + catch (ArgumentException) + { + return null; + } + } + + private class VssFileStorageReader : VssLocalFile, IVssClientStorageReader, IDisposable + { + private readonly string m_path; + private Dictionary m_settings; + + private readonly FileSystemWatcher m_watcher; + private readonly ReaderWriterLockSlim m_lock; + private long m_completedRefreshId; + private long m_outstandingRefreshId; + + public VssFileStorageReader(string fullPath, char pathSeparator, StringComparer comparer) + : base(fullPath, pathSeparator, comparer) + { + m_path = fullPath; + m_lock = new ReaderWriterLockSlim(LockRecursionPolicy.NoRecursion); + m_completedRefreshId = 0; + m_outstandingRefreshId = 1; + + // Set up the file system watcher + { + string directoryToWatch = Path.GetDirectoryName(m_path); + + if (!Directory.Exists(directoryToWatch)) + { + Directory.CreateDirectory(directoryToWatch); + } + + m_watcher = new FileSystemWatcher(directoryToWatch, Path.GetFileName(m_path)); + m_watcher.IncludeSubdirectories = false; + m_watcher.NotifyFilter = NotifyFilters.LastWrite | NotifyFilters.CreationTime; + m_watcher.Changed += OnCacheFileChanged; + m_watcher.EnableRaisingEvents = true; + } + } + + public T ReadEntry(string path) + { + return ReadEntry(path, default(T)); + } + + public T ReadEntry(string path, T defaultValue) + { + path = NormalizePath(path); + RefreshIfNeeded(); + + Dictionary settings = m_settings; // use a pointer to m_settings, incase m_settings gets set to a new instance during the operation + JRaw value; + if (settings.TryGetValue(path, out value) && value != null) + { + return JsonConvert.DeserializeObject(value.ToString()); + } + return defaultValue; + } + + public IDictionary ReadEntries(string pathPrefix) + { + string prefix = NormalizePath(pathPrefix, true); + RefreshIfNeeded(); + Dictionary settings = m_settings; // use a pointer to m_settings, incase m_settings gets set to a new instance during the operation + Dictionary matchingEntries = new 
Dictionary(); + foreach (KeyValuePair kvp in settings.Where(kvp => kvp.Key == prefix || kvp.Key.StartsWith(prefix + PathSeparator))) + { + try + { + matchingEntries[kvp.Key] = JsonConvert.DeserializeObject(kvp.Value.ToString()); + } + catch (JsonSerializationException) { } + catch (JsonReaderException) { } + } + return matchingEntries; + } + + private void OnCacheFileChanged(object sender, FileSystemEventArgs e) + { + NotifyChanged(); + } + + public void Dispose() + { + m_watcher.Dispose(); + } + + public void NotifyChanged() + { + using (new ReadLockScope(m_lock)) + { + Interlocked.Increment(ref m_outstandingRefreshId); + } + } + + private void RefreshIfNeeded() + { + long requestedRefreshId; + + using (new ReadLockScope(m_lock)) + { + requestedRefreshId = Interlocked.Read(ref m_outstandingRefreshId); + + if (m_completedRefreshId >= requestedRefreshId) + { + return; + } + } + + Dictionary newSettings; + using (GetNewMutexScope()) + { + if (m_completedRefreshId >= requestedRefreshId) + { + return; + } + newSettings = LoadFile(); + } + + using (new ReadLockScope(m_lock)) + { + if (m_completedRefreshId >= requestedRefreshId) + { + return; + } + } + + using (new WriteLockScope(m_lock)) + { + if (m_completedRefreshId >= requestedRefreshId) + { + return; + } + + m_completedRefreshId = requestedRefreshId; + m_settings = newSettings; + } + } + + private struct ReadLockScope : IDisposable + { + public ReadLockScope(ReaderWriterLockSlim @lock) + { + m_lock = @lock; + + m_lock.EnterReadLock(); + } + + public void Dispose() + { + m_lock.ExitReadLock(); + } + + private readonly ReaderWriterLockSlim m_lock; + } + + private struct WriteLockScope : IDisposable + { + public WriteLockScope(ReaderWriterLockSlim @lock) + { + m_lock = @lock; + m_lock.EnterWriteLock(); + } + + public void Dispose() + { + m_lock.ExitWriteLock(); + } + + private readonly ReaderWriterLockSlim m_lock; + } + } + + private class VssFileStorageWriter : VssLocalFile, IVssClientStorageWriter + { + public VssFileStorageWriter(string fullPath, char pathSeparator, StringComparer comparer) + : base(fullPath, pathSeparator, comparer) + { + } + + public void WriteEntries(IEnumerable> entries) + { + if (entries.Any()) + { + using (GetNewMutexScope()) + { + bool changesMade = false; + Dictionary originalSettings = LoadFile(); + Dictionary newSettings = new Dictionary(PathComparer); + if (originalSettings.Any()) + { + originalSettings.Copy(newSettings); + } + foreach (KeyValuePair kvp in entries) + { + string path = NormalizePath(kvp.Key); + if (kvp.Value != null) + { + JRaw jRawValue = new JRaw(JsonConvert.SerializeObject(kvp.Value)); + if (!newSettings.ContainsKey(path) || !newSettings[path].Equals(jRawValue)) + { + newSettings[path] = jRawValue; + changesMade = true; + } + } + else + { + if (newSettings.Remove(path)) + { + changesMade = true; + } + } + } + if (changesMade) + { + SaveFile(originalSettings, newSettings); + } + } + } + } + + public void WriteEntry(string path, object value) + { + WriteEntries(new KeyValuePair[] { new KeyValuePair(path, value) }); + } + } + + private class VssLocalFile + { + private readonly string m_filePath; + private readonly string m_bckUpFilePath; + private readonly string m_emptyPathSegment; + + public VssLocalFile(string filePath, char pathSeparator, StringComparer comparer) + { + m_filePath = filePath; + PathComparer = comparer; + PathSeparator = pathSeparator; + m_emptyPathSegment = new string(pathSeparator, 2); + FileInfo fileInfo = new FileInfo(m_filePath); + m_bckUpFilePath = 
Path.Combine(fileInfo.Directory.FullName, "~" + fileInfo.Name); + } + + protected char PathSeparator { get; } + + protected string NormalizePath(string path, bool allowRootPath = false) + { + if (string.IsNullOrEmpty(path) || path[0] != PathSeparator || path.IndexOf(m_emptyPathSegment, StringComparison.Ordinal) >= 0 || (!allowRootPath && path.Length == 1)) + { + throw new ArgumentException(CommonResources.InvalidClientStoragePath(path, PathSeparator), "path"); + } + if (path[path.Length - 1] == PathSeparator) + { + path = path.Substring(0, path.Length - 1); + } + return path; + } + + protected StringComparer PathComparer { get; } + + protected Dictionary LoadFile() + { + Dictionary settings = null; + if (File.Exists(m_filePath)) + { + settings = LoadFile(m_filePath); + } + if ((settings == null || !settings.Any()) && File.Exists(m_bckUpFilePath)) + { + settings = LoadFile(m_bckUpFilePath); + } + return settings ?? new Dictionary(PathComparer); + } + + private Dictionary LoadFile(string path) + { + Dictionary settings = new Dictionary(PathComparer); + try + { + string fileContent; + using (var fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read | FileShare.Delete)) + { + using (var sr = new StreamReader(fs, Encoding.UTF8)) + { + fileContent = sr.ReadToEnd(); + } + } + IReadOnlyDictionary loadedSettings = JsonConvert.DeserializeObject>(fileContent); + if (loadedSettings != null) + { + // Replay the settings into our dictionary one by one so that our uniqueness constraint + // isn't violated based on the StringComparer for this instance. + foreach (KeyValuePair setting in loadedSettings) + { + settings[setting.Key] = setting.Value; + } + } + } + catch (DirectoryNotFoundException) { } + catch (FileNotFoundException) { } + catch (JsonReaderException) { } + catch (JsonSerializationException) { } + catch (InvalidCastException) { } + + return settings; + } + + protected void SaveFile(IDictionary originalSettings, IDictionary newSettings) + { + string newContent = JValue.Parse(JsonConvert.SerializeObject(newSettings)).ToString(Formatting.Indented); + if (originalSettings.Any()) + { + // during testing, creating this backup provided reliability in the event of aborted threads, and + // crashed processes. With this, I was not able to simulate a case where corruption happens, but there is no + // 100% gaurantee against corruption. 
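+                // The sequence below is: snapshot the previous content into the "~" backup file, overwrite the
+                // main file, then delete the backup. LoadFile prefers the main file and falls back to the backup
+                // when the main file is missing or empty, so an interrupted write still leaves one readable copy.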
+ string originalContent = JValue.Parse(JsonConvert.SerializeObject(originalSettings)).ToString(Formatting.Indented); + SaveFile(m_bckUpFilePath, originalContent); + } + SaveFile(m_filePath, newContent); + if (File.Exists(m_bckUpFilePath)) + { + File.Delete(m_bckUpFilePath); + } + } + + private void SaveFile(string path, string content) + { + bool success = false; + int tries = 0; + int retryDelayMilliseconds = 10; + const int maxNumberOfRetries = 6; + do + { + try + { + using (var fs = new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.Delete)) + { + using (var sw = new StreamWriter(fs, Encoding.UTF8)) + { + sw.Write(content); + } + } + success = true; + } + catch (IOException) + { + if (++tries > maxNumberOfRetries) + { + throw; + } + Task.Delay(retryDelayMilliseconds).Wait(); + retryDelayMilliseconds *= 2; + } + } + while (!success); + } + + protected MutexScope GetNewMutexScope() + { + return new MutexScope(m_filePath.Replace(Path.DirectorySeparatorChar, '_')); + } + + protected struct MutexScope : IDisposable + { + public MutexScope(string name) + { + m_mutex = new Mutex(false, name); + + try + { + if (!m_mutex.WaitOne(s_mutexTimeout)) + { + throw new TimeoutException(); + } + } + catch (AbandonedMutexException) + { + // If this is thrown, then we hold the mutex. + } + } + + public void Dispose() + { + m_mutex.ReleaseMutex(); + } + + private readonly Mutex m_mutex; + private static readonly TimeSpan s_mutexTimeout = TimeSpan.FromSeconds(10); + } + } + } +} diff --git a/src/Sdk/Common/Common/Diagnostics/HttpRequestMessageExtensions.cs b/src/Sdk/Common/Common/Diagnostics/HttpRequestMessageExtensions.cs new file mode 100644 index 00000000000..30279a5c631 --- /dev/null +++ b/src/Sdk/Common/Common/Diagnostics/HttpRequestMessageExtensions.cs @@ -0,0 +1,31 @@ +using System; +using System.ComponentModel; +using System.Net.Http; + +namespace GitHub.Services.Common.Diagnostics +{ + [EditorBrowsable(EditorBrowsableState.Never)] + internal static class HttpRequestMessageExtensions + { + public static VssHttpMethod GetHttpMethod(this HttpRequestMessage message) + { + String methodName = message.Method.Method; + VssHttpMethod httpMethod = VssHttpMethod.UNKNOWN; + if (!Enum.TryParse(methodName, true, out httpMethod)) + { + httpMethod = VssHttpMethod.UNKNOWN; + } + return httpMethod; + } + + public static VssTraceActivity GetActivity(this HttpRequestMessage message) + { + Object traceActivity; + if (!message.Properties.TryGetValue(VssTraceActivity.PropertyName, out traceActivity)) + { + return VssTraceActivity.Empty; + } + return (VssTraceActivity)traceActivity; + } + } +} diff --git a/src/Sdk/Common/Common/Diagnostics/VssHttpEventSource.cs b/src/Sdk/Common/Common/Diagnostics/VssHttpEventSource.cs new file mode 100644 index 00000000000..ee267cd60e9 --- /dev/null +++ b/src/Sdk/Common/Common/Diagnostics/VssHttpEventSource.cs @@ -0,0 +1,1173 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.Tracing; +using System.Globalization; +using System.Net; +using System.Net.Http; +using System.Net.Sockets; +#if !NETSTANDARD +using System.Diagnostics.Eventing; +#endif + +namespace GitHub.Services.Common.Diagnostics +{ + [EventSource(Name = VssEventSources.Http)] + internal sealed class VssHttpEventSource : EventSource + { + public static class Tasks + { + public const EventTask HttpRequest = (EventTask)1; + public const EventTask Authentication = (EventTask)2; + public const EventTask HttpOperation = (EventTask)3; + } + + public static class Keywords + { + public const 
EventKeywords Authentication = (EventKeywords)0x0000000000000001; + public const EventKeywords HttpOperation = (EventKeywords)0x0000000000000002; + } + + /// + /// Gets the singleton event source used for logging. + /// + internal static VssHttpEventSource Log + { + get + { + return m_log.Value; + } + } + + [NonEvent] + public void AuthenticationStart(VssTraceActivity activity) + { + if (IsEnabled()) + { + SetActivityId(activity); + AuthenticationStart(); + } + } + + [NonEvent] + public void AuthenticationStop(VssTraceActivity activity) + { + if (IsEnabled()) + { + SetActivityId(activity); + AuthenticationStop(); + } + } + + [NonEvent] + public void AuthenticationError( + VssTraceActivity activity, + IssuedTokenProvider provider, + String message) + { + if (IsEnabled(EventLevel.Error, Keywords.Authentication)) + { + SetActivityId(activity); + WriteMessageEvent(provider.CredentialType, provider.GetHashCode(), message, this.AuthenticationError); + } + } + + [NonEvent] + public void AuthenticationError( + VssTraceActivity activity, + IssuedTokenProvider provider, + Exception exception) + { + if (IsEnabled(EventLevel.Error, Keywords.Authentication)) + { + if (exception is AggregateException) + { + exception = ((AggregateException)exception).Flatten().InnerException; + } + + AuthenticationError(activity, provider, exception.ToString()); + } + } + + [NonEvent] + public void HttpOperationStart( + VssTraceActivity activity, + String area, + String operation) + { + if (IsEnabled()) + { + SetActivityId(activity); + HttpOperationStart(area, operation); + } + } + + [NonEvent] + public void HttpOperationStop( + VssTraceActivity activity, + String area, + String operation) + { + if (IsEnabled()) + { + SetActivityId(activity); + HttpOperationStop(area, operation); + } + } + + [NonEvent] + public void HttpRequestStart( + VssTraceActivity activity, + HttpRequestMessage request) + { + if (IsEnabled()) + { + SetActivityId(activity); + HttpRequestStart(request.GetHttpMethod(), request.RequestUri.AbsoluteUri); + } + } + + [NonEvent] + public Exception HttpRequestFailed( + VssTraceActivity activity, + HttpRequestMessage request, + Exception exception) + { + if (IsEnabled()) + { + HttpRequestFailed(activity, request, exception.ToString()); + } + return exception; + } + + [NonEvent] + public void HttpRequestFailed( + VssTraceActivity activity, + HttpRequestMessage request, + String message) + { + if (IsEnabled()) + { + SetActivityId(activity); + WriteMessageEvent(request.GetHttpMethod(), request.RequestUri.AbsoluteUri, message, this.HttpRequestFailed); + } + } + + [NonEvent] + public void HttpRequestFailed( + VssTraceActivity activity, + HttpRequestMessage request, + HttpStatusCode statusCode, + string afdRefInfo) + { + if (IsEnabled()) + { + SetActivityId(activity); + CultureInfo cultureInfo = CultureInfo.InstalledUICulture; + String message = String.Format(cultureInfo, "HTTP Status: {0}", statusCode); + + if (!string.IsNullOrEmpty(afdRefInfo)) + { + message += $", AFD Ref: {afdRefInfo}"; + } + + WriteMessageEvent(request.GetHttpMethod(), request.RequestUri.AbsoluteUri, message, this.HttpRequestFailed); + } + } + + [NonEvent] + public void HttpRequestUnauthorized( + VssTraceActivity activity, + HttpRequestMessage request, + String message) + { + if (IsEnabled()) + { + SetActivityId(activity); + HttpRequestUnauthorized(request.GetHttpMethod(), request.RequestUri.AbsoluteUri, message); + } + } + + [NonEvent] + public void HttpRequestSucceeded( + VssTraceActivity activity, + HttpResponseMessage response) + { + if 
(IsEnabled()) + { + SetActivityId(activity); + HttpRequestSucceeded(response.RequestMessage.GetHttpMethod(), response.RequestMessage.RequestUri.AbsoluteUri, (Int32)response.StatusCode); + } + } + + [NonEvent] + public void HttpRequestRetrying( + VssTraceActivity activity, + HttpRequestMessage request, + Int32 attempt, + TimeSpan backoffDuration, + HttpStatusCode? httpStatusCode, + WebExceptionStatus? webExceptionStatus, + SocketError? socketErrorCode, + WinHttpErrorCode? winHttpErrorCode, + CurlErrorCode? curlErrorCode, + string afdRefInfo) + { + if (IsEnabled()) + { + String reason = ""; + if (httpStatusCode != null) + { + reason = String.Format(CultureInfo.InvariantCulture, "HTTP Status: {0}", httpStatusCode.Value); + } + else if (webExceptionStatus != null) + { + reason = String.Format(CultureInfo.InvariantCulture, "Web Exception Status: {0}", webExceptionStatus.Value); + } + else if (socketErrorCode != null) + { + reason = String.Format(CultureInfo.InvariantCulture, "Socket Error: {0}", socketErrorCode.Value); + } + else if (winHttpErrorCode != null) + { + reason = String.Format(CultureInfo.InvariantCulture, "WinHttp Error: {0}", winHttpErrorCode); + } + else if (curlErrorCode != null) + { + reason = String.Format(CultureInfo.InvariantCulture, "Curl Error: {0}", curlErrorCode); + } + + if (!string.IsNullOrEmpty(afdRefInfo)) + { + reason += $", AFD Ref: {afdRefInfo}"; + } + + SetActivityId(activity); + HttpRequestRetrying(request.GetHttpMethod(), request.RequestUri.AbsoluteUri, attempt, reason, backoffDuration.TotalSeconds); + } + } + + [NonEvent] + public void HttpRequestFailedMaxAttempts( + VssTraceActivity activity, + HttpRequestMessage request, + Int32 attempt, + HttpStatusCode? httpStatusCode, + WebExceptionStatus? webExceptionStatus, + SocketError? socketErrorCode, + WinHttpErrorCode? winHttpErrorCode, + CurlErrorCode? 
curlErrorCode, + string afdRefInfo) + { + if (IsEnabled()) + { + String reason = ""; + if (httpStatusCode != null) + { + reason = String.Format(CultureInfo.InvariantCulture, "HTTP Status: {0}", httpStatusCode.Value); + } + else if (webExceptionStatus != null) + { + reason = String.Format(CultureInfo.InvariantCulture, "Web Exception Status: {0}", webExceptionStatus.Value); + } + else if (socketErrorCode != null) + { + reason = String.Format(CultureInfo.InvariantCulture, "Socket Error: {0}", socketErrorCode.Value); + } + else if (winHttpErrorCode != null) + { + reason = String.Format(CultureInfo.InvariantCulture, "WinHttp Error: {0}", winHttpErrorCode); + } + else if (curlErrorCode != null) + { + reason = String.Format(CultureInfo.InvariantCulture, "Curl Error: {0}", curlErrorCode); + } + + if (!string.IsNullOrEmpty(afdRefInfo)) + { + reason += $", AFD Ref: {afdRefInfo}"; + } + + SetActivityId(activity); + HttpRequestFailedMaxAttempts(request.GetHttpMethod(), request.RequestUri.AbsoluteUri, attempt, reason); + } + } + + [NonEvent] + public void HttpRequestSucceededWithRetry( + VssTraceActivity activity, + HttpResponseMessage response, + Int32 attempt) + { + if (IsEnabled()) + { + SetActivityId(activity); + HttpRequestSucceededWithRetry(response.RequestMessage.GetHttpMethod(), response.RequestMessage.RequestUri.AbsoluteUri, attempt); + } + } + + [NonEvent] + public void HttpRequestCancelled( + VssTraceActivity activity, + HttpRequestMessage request) + { + if (IsEnabled()) + { + SetActivityId(activity); + HttpRequestCancelled(request.GetHttpMethod(), request.RequestUri.AbsoluteUri); + } + } + + [NonEvent] + public void HttpRequestTimedOut( + VssTraceActivity activity, + HttpRequestMessage request, + TimeSpan timeout) + { + if (IsEnabled()) + { + SetActivityId(activity); + HttpRequestTimedOut(request.GetHttpMethod(), request.RequestUri.AbsoluteUri, (Int32)timeout.TotalSeconds); + } + } + + [NonEvent] + public void HttpRequestStop( + VssTraceActivity activity, + HttpResponseMessage response) + { + if (IsEnabled()) + { + SetActivityId(activity); + HttpRequestStop(response.RequestMessage.GetHttpMethod(), response.RequestMessage.RequestUri.AbsoluteUri, (Int32)response.StatusCode); + } + } + + [NonEvent] + public void AuthenticationFailed( + VssTraceActivity activity, + HttpResponseMessage response) + { + if (IsEnabled()) + { + SetActivityId(activity); + WriteMessageEvent((Int32)response.StatusCode, response.Headers.ToString(), this.AuthenticationFailed); + } + } + + [NonEvent] + public void IssuedTokenProviderCreated( + VssTraceActivity activity, + IssuedTokenProvider provider) + { + if (IsEnabled()) + { + SetActivityId(activity); + IssuedTokenProviderCreated(provider.CredentialType, provider.GetHashCode(), provider.GetAuthenticationParameters()); + } + } + + [NonEvent] + public void IssuedTokenProviderRemoved( + VssTraceActivity activity, + IssuedTokenProvider provider) + { + if (IsEnabled()) + { + SetActivityId(activity); + IssuedTokenProviderRemoved(provider.CredentialType, provider.GetHashCode(), provider.GetAuthenticationParameters()); + } + } + + [NonEvent] + internal void IssuedTokenProviderNotFound(VssTraceActivity activity) + { + if (IsEnabled()) + { + SetActivityId(activity); + IssuedTokenProviderNotFound(); + } + } + + [NonEvent] + internal void IssuedTokenProviderPromptRequired( + VssTraceActivity activity, + IssuedTokenProvider provider) + { + if (IsEnabled()) + { + SetActivityId(activity); + IssuedTokenProviderPromptRequired(provider.CredentialType, provider.GetHashCode()); + } + } + 
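+        // The [NonEvent] methods above and below all follow the same ETW pattern: check IsEnabled()
+        // (or a level/keyword-specific overload) first so argument formatting is skipped when nothing is
+        // listening, set the ETW activity id from the supplied VssTraceActivity, then forward to the
+        // private [Event]-attributed method that actually writes the payload. An illustrative call site:
+        //     VssHttpEventSource.Log.IssuedTokenAcquiring(VssTraceActivity.Current, tokenProvider);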
+ [NonEvent] + public void IssuedTokenAcquiring( + VssTraceActivity activity, + IssuedTokenProvider provider) + { + if (IsEnabled()) + { + SetActivityId(activity); + IssuedTokenAcquiring(provider.CredentialType, provider.GetHashCode()); + } + } + + [NonEvent] + public void IssuedTokenWaitStart( + VssTraceActivity activity, + IssuedTokenProvider provider, + Guid waitForActivityId) + { + if (IsEnabled(EventLevel.Verbose, Keywords.Authentication)) + { + SetActivityId(activity); + IssuedTokenWaitStart(provider.CredentialType, provider.GetHashCode(), waitForActivityId); + } + } + + [NonEvent] + public void IssuedTokenWaitStop( + VssTraceActivity activity, + IssuedTokenProvider provider, + IssuedToken token) + { + if (IsEnabled(EventLevel.Verbose, Keywords.Authentication)) + { + SetActivityId(activity); + IssuedTokenWaitStop(provider.CredentialType, provider.GetHashCode(), token != null ? token.GetHashCode() : 0); + } + } + + [NonEvent] + public void IssuedTokenAcquired( + VssTraceActivity activity, + IssuedTokenProvider provider, + IssuedToken token) + { + if (IsEnabled()) + { + SetActivityId(activity); + IssuedTokenAcquired(provider.CredentialType, provider.GetHashCode(), token != null ? token.GetHashCode() : 0); + } + } + + [NonEvent] + public void IssuedTokenInvalidated( + VssTraceActivity activity, + IssuedTokenProvider provider, + IssuedToken token) + { + if (IsEnabled()) + { + SetActivityId(activity); + IssuedTokenInvalidated(provider.CredentialType, provider.GetHashCode(), token.GetHashCode()); + } + } + + [NonEvent] + public void IssuedTokenValidated( + VssTraceActivity activity, + IssuedTokenProvider provider, + IssuedToken token) + { + if (IsEnabled()) + { + SetActivityId(activity); + IssuedTokenValidated(provider.CredentialType, provider.GetHashCode(), token.GetHashCode()); + } + } + + [NonEvent] + public void IssuedTokenRetrievedFromCache( + VssTraceActivity activity, + IssuedTokenProvider provider, + IssuedToken token) + { + if (IsEnabled()) + { + SetActivityId(activity); + IssuedTokenRetrievedFromCache(provider.CredentialType, provider.GetHashCode(), token.GetHashCode()); + } + } + + [Event(1, Level = EventLevel.Verbose, Task = Tasks.HttpRequest, Opcode = EventOpcode.Start, Message = "Started {0} request to {1}")] + private void HttpRequestStart( + VssHttpMethod method, + String url) + { + if (IsEnabled()) + { + WriteEvent(1, (Int32)method, url); + } + } + + [Event(2, Level = EventLevel.Error, Task = Tasks.HttpRequest, Opcode = EventOpcode.Info, Message = "{0} request to {1} failed. {2}")] + private void HttpRequestFailed( + VssHttpMethod method, + String url, + String message) + { + if (IsEnabled()) + { + WriteEvent(2, (Int32)method, url, message); + } + } + + [Event(3, Level = EventLevel.Informational, Task = Tasks.HttpRequest, Opcode = EventOpcode.Info, Message = "{0} request to {1} succeeded with status code {2}")] + private void HttpRequestSucceeded( + VssHttpMethod method, + String url, + Int32 statusCode) + { + if (IsEnabled()) + { + WriteEvent(3, (Int32)method, url, statusCode); + } + } + + [Event(4, Level = EventLevel.Warning, Task = Tasks.HttpRequest, Opcode = EventOpcode.Info, Message = "Attempt {2} of {0} request to {1} failed ({3}). 
The operation will be retried in {4} seconds.")] + private void HttpRequestRetrying( + VssHttpMethod method, + String url, + Int32 attempt, + String reason, + Double backoffDurationInSeconds) + { + if (IsEnabled()) + { + WriteEvent(4, (Int32)method, url, attempt, reason, backoffDurationInSeconds); + } + } + + [Event(5, Level = EventLevel.Error, Task = Tasks.HttpRequest, Opcode = EventOpcode.Info, Message = "Attempt {2} of {0} request to {1} failed ({3}). The maximum number of attempts has been reached.")] + private void HttpRequestFailedMaxAttempts( + VssHttpMethod method, + String url, + Int32 attempt, + String reason) + { + if (IsEnabled()) + { + WriteEvent(5, (Int32)method, url, attempt, reason); + } + } + + [Event(6, Level = EventLevel.Verbose, Task = Tasks.HttpRequest, Opcode = EventOpcode.Info, Message = "Attempt {2} of {0} request to {1} succeeded.")] + private void HttpRequestSucceededWithRetry( + VssHttpMethod method, + String url, + Int32 attempt) + { + if (IsEnabled()) + { + WriteEvent(6, (Int32)method, url, attempt); + } + } + + [Event(7, Level = EventLevel.Warning, Task = Tasks.HttpRequest, Opcode = EventOpcode.Info, Message = "{0} request to {1} has been cancelled.")] + private void HttpRequestCancelled( + VssHttpMethod method, + String url) + { + if (IsEnabled()) + { + WriteEvent(7, (Int32)method, url); + } + } + + [Event(8, Level = EventLevel.Warning, Task = Tasks.HttpRequest, Opcode = EventOpcode.Info, Message = "{0} request to {1} timed out after {2} seconds.")] + private void HttpRequestTimedOut( + VssHttpMethod method, + String url, + Int32 timeoutInSeconds) + { + if (IsEnabled()) + { + WriteEvent(8, (Int32)method, url, timeoutInSeconds); + } + } + + [Event(9, Level = EventLevel.Error, Task = Tasks.HttpRequest, Opcode = EventOpcode.Info, Message = "{0} request to {1} is not authorized. 
Details: {2}")] + private void HttpRequestUnauthorized( + VssHttpMethod method, + String url, + String message) + { + if (IsEnabled()) + { + WriteEvent(9, (Int32)method, url, message); + } + } + + [Event(10, Keywords = Keywords.Authentication, Level = EventLevel.Warning, Task = Tasks.HttpRequest, Message = "Authentication failed with status code {0}.%n{1}")] + private void AuthenticationFailed( + Int32 statusCode, + String headers) + { + if (IsEnabled(EventLevel.Verbose, Keywords.Authentication)) + { + WriteEvent(10, statusCode, headers); + } + } + + [Event(11, Keywords = Keywords.Authentication, Level = EventLevel.Informational, Task = Tasks.HttpRequest, Message = "Authentication successful using {0} credentials")] + private void AuthenticationSucceeded(VssCredentialsType credentialsType) + { + if (IsEnabled(EventLevel.Verbose, Keywords.Authentication)) + { + WriteEvent(11, (Int32)credentialsType); + } + } + + [Event(12, Keywords = Keywords.Authentication, Level = EventLevel.Verbose, Task = Tasks.Authentication, Opcode = EventOpcode.Start, Message = "Started authentication")] + private void AuthenticationStart() + { + if (IsEnabled(EventLevel.Verbose, Keywords.Authentication)) + { + WriteEvent(12); + } + } + + [Event(13, Keywords = Keywords.Authentication, Level = EventLevel.Verbose, Task = Tasks.Authentication, Message = "Created {0} issued token provider instance {1} ({2})")] + private void IssuedTokenProviderCreated( + VssCredentialsType credentialsType, + Int32 providerId, + String parameters) + { + if (IsEnabled()) + { + WriteEvent(13, (Int32)credentialsType, providerId, parameters); + } + } + + [Event(14, Keywords = Keywords.Authentication, Level = EventLevel.Verbose, Task = Tasks.Authentication, Message = "Removed {0} issued token provider instance {1} ({2})")] + private void IssuedTokenProviderRemoved( + VssCredentialsType credentialsType, + Int32 providerId, + String parameters) + { + if (IsEnabled(EventLevel.Verbose, Keywords.Authentication)) + { + WriteEvent(14, (Int32)credentialsType, providerId, parameters); + } + } + + [Event(15, Keywords = Keywords.Authentication, Level = EventLevel.Verbose, Task = Tasks.Authentication, Message = "{0} issued token provider instance {1} is acquiring a token")] + private void IssuedTokenAcquiring( + VssCredentialsType credentialsType, + Int32 providerId) + { + if (IsEnabled(EventLevel.Verbose, Keywords.Authentication)) + { + WriteEvent(15, (Int32)credentialsType, providerId); + } + } + + [Event(16, Keywords = Keywords.Authentication, Level = EventLevel.Verbose, Task = Tasks.Authentication, Opcode = EventOpcode.Suspend, Message = "{0} issued token provider instance {1} is waiting for issued token from activity {2}")] + private void IssuedTokenWaitStart( + VssCredentialsType credentialsType, + Int32 providerId, + Guid waitForActivityId) + { + WriteEvent(16, (Int32)credentialsType, providerId, waitForActivityId); + } + + [Event(17, Keywords = Keywords.Authentication, Level = EventLevel.Verbose, Task = Tasks.Authentication, Opcode = EventOpcode.Resume, Message = "{0} issued token provider instance {1} received token instance {2}")] + private void IssuedTokenWaitStop( + VssCredentialsType credentialsType, + Int32 providerId, + Int32 issuedTokenId) + { + WriteEvent(17, (Int32)credentialsType, providerId, issuedTokenId); + } + + [Event(18, Keywords = Keywords.Authentication, Level = EventLevel.Verbose, Task = Tasks.Authentication, Message = "{0} issued token provider instance {1} acquired new token instance {2}")] + private void IssuedTokenAcquired( 
+ VssCredentialsType credentialsType, + Int32 providerId, + Int32 issuedTokenId) + { + if (IsEnabled(EventLevel.Verbose, Keywords.Authentication)) + { + WriteEvent(18, (Int32)credentialsType, providerId, issuedTokenId); + } + } + + [Event(20, Keywords = Keywords.Authentication, Level = EventLevel.Verbose, Task = Tasks.Authentication, Message = "{0} issued token provider instance {1} invalidated token instance {2}")] + private void IssuedTokenInvalidated( + VssCredentialsType credentialsType, + Int32 providerId, + Int32 issuedTokenId) + { + if (IsEnabled(EventLevel.Verbose, Keywords.Authentication)) + { + WriteEvent(20, (Int32)credentialsType, providerId, issuedTokenId); + } + } + + [Event(21, Keywords = Keywords.Authentication, Level = EventLevel.Verbose, Task = Tasks.Authentication, Message = "{0} issued token provider instance {1} validated token instance {2}")] + private void IssuedTokenValidated( + VssCredentialsType credentialsType, + Int32 providerId, + Int32 issuedTokenId) + { + if (IsEnabled(EventLevel.Verbose, Keywords.Authentication)) + { + WriteEvent(21, (Int32)credentialsType, providerId, issuedTokenId); + } + } + + [Event(22, Keywords = Keywords.Authentication, Level = EventLevel.Verbose, Task = Tasks.Authentication, Message = "{0} issued token provider instance {1} retrieved token instance {2}")] + private void IssuedTokenRetrievedFromCache( + VssCredentialsType credentialsType, + Int32 providerId, + Int32 issuedTokenId) + { + if (IsEnabled(EventLevel.Verbose, Keywords.Authentication)) + { + WriteEvent(22, (Int32)credentialsType, providerId, issuedTokenId); + } + } + + [Event(23, Keywords = Keywords.Authentication, Level = EventLevel.Verbose, Task = Tasks.Authentication, Opcode = EventOpcode.Stop, Message = "Finished authentication")] + private void AuthenticationStop() + { + if (IsEnabled(EventLevel.Verbose, Keywords.Authentication)) + { + WriteEvent(23); + } + } + + [Event(24, Level = EventLevel.Verbose, Task = Tasks.HttpRequest, Opcode = EventOpcode.Stop, Message = "Finished {0} request to {1} with status code {2}")] + private void HttpRequestStop( + VssHttpMethod method, + String url, + Int32 statusCode) + { + if (IsEnabled()) + { + WriteEvent(24, (Int32)method, url, statusCode); + } + } + + [Event(25, Keywords = Keywords.HttpOperation, Level = EventLevel.Informational, Task = Tasks.HttpOperation, Opcode = EventOpcode.Start, Message = "Starting operation {0}.{1}")] + private void HttpOperationStart( + String area, + String operation) + { + if (IsEnabled(EventLevel.Informational, Keywords.HttpOperation)) + { + WriteEvent(25, area, operation); + } + } + + [Event(26, Keywords = Keywords.HttpOperation, Level = EventLevel.Informational, Task = Tasks.HttpOperation, Opcode = EventOpcode.Stop, Message = "Finished operation {0}.{1}")] + private void HttpOperationStop( + String area, + String operation) + { + if (IsEnabled(EventLevel.Informational, Keywords.HttpOperation)) + { + WriteEvent(26, area, operation); + } + } + + [Event(27, Keywords = Keywords.Authentication, Level = EventLevel.Error, Task = Tasks.Authentication, Opcode = EventOpcode.Info, Message = "{0} issued token provider instance {1} failed to retrieve a token.%nReason: {2}")] + private void AuthenticationError( + VssCredentialsType credentialsType, + Int32 providerId, + String message) + { + if (IsEnabled(EventLevel.Error, Keywords.Authentication)) + { + WriteEvent(27, (Int32)credentialsType, providerId, message); + } + } + + [Event(28, Keywords = Keywords.Authentication, Level = EventLevel.Warning, Task = 
Tasks.HttpRequest, Opcode = EventOpcode.Info, Message = "No issued token provider found which can handle the authentication challenge")] + private void IssuedTokenProviderNotFound() + { + if (IsEnabled(EventLevel.Warning, Keywords.Authentication)) + { + WriteEvent(28); + } + } + + [Event(29, Keywords = Keywords.Authentication, Level = EventLevel.Warning, Task = Tasks.HttpRequest, Opcode = EventOpcode.Info, Message = "{0} issued token provider instance {1} requires an interactive prompt which is not allowed by the current settings")] + private void IssuedTokenProviderPromptRequired( + VssCredentialsType credentialsType, + Int32 providerId) + { + if (IsEnabled(EventLevel.Warning, Keywords.Authentication)) + { + WriteEvent(29, (Int32)credentialsType, providerId); + } + } + + [Event(30, Keywords = Keywords.HttpOperation, Level = EventLevel.Critical, Task = Tasks.HttpOperation, Opcode = EventOpcode.Info, Message = "A task completion source was not properly completed during authentication")] + public void TokenSourceNotCompleted() + { + if (IsEnabled(EventLevel.Critical, Keywords.HttpOperation)) + { + WriteEvent(30); + } + } + + [Event(31, Keywords = Keywords.Authentication, Level = EventLevel.Warning, Task = Tasks.Authentication, Opcode = EventOpcode.Info, Message = "Retrieving an AAD auth token took a long time ({0} seconds)")] + public void AuthorizationDelayed(string timespan) + { + if(IsEnabled(EventLevel.Warning, Keywords.Authentication)) + { + WriteEvent(31, timespan); + } + } + + [Event(32, Keywords = Keywords.Authentication, Level = EventLevel.Informational, Task = Tasks.Authentication, Opcode = EventOpcode.Info, Message = "AAD Correlation ID for this token request: {0}")] + public void AADCorrelationID(string aadCorrelationId) + { + if (IsEnabled(EventLevel.Informational, Keywords.Authentication)) + { + WriteEvent(32, aadCorrelationId); + } + } + + /// + /// Sets the activity ID of the current thread. + /// + /// The trace activity which should be active on the calling thread + [NonEvent] + private void SetActivityId(VssTraceActivity activity) + { +#if !NETSTANDARD + if (activity != null) + { + Guid activityId = activity.Id; + EventProvider.SetActivityId(ref activityId); + } +#endif + } + + [NonEvent] + private static IList SplitMessage(String message) + { + List list = new List(); + if (message.Length > 30000) + { + int num = 0; + do + { + Int32 num2 = (message.Length - num > 30000) ? 
30000 : (message.Length - num); + list.Add(message.Substring(num, num2)); + num += num2; + } + while (message.Length > num); + } + else + { + list.Add(message); + } + return list; + } + + [NonEvent] + private void WriteMessageEvent( + Int32 param0, + String message, + Action writeEvent) + { + writeEvent(param0, message); +#if !NETSTANDARD + if (EventProvider.GetLastWriteEventError() == EventProvider.WriteEventErrorCode.EventTooBig) + { + foreach (String messagePart in SplitMessage(message)) + { + writeEvent(param0, messagePart); + } + } +#endif + } + + [NonEvent] + private void WriteMessageEvent( + VssCredentialsType param0, + Int32 param1, + String message, + Action writeEvent) + { + writeEvent(param0, param1, message); +#if !NETSTANDARD + if (EventProvider.GetLastWriteEventError() == EventProvider.WriteEventErrorCode.EventTooBig) + { + foreach (String messagePart in SplitMessage(message)) + { + writeEvent(param0, param1, messagePart); + } + } +#endif + } + + [NonEvent] + private void WriteMessageEvent( + VssHttpMethod param0, + String param1, + String message, + Action writeEvent) + { + writeEvent(param0, param1, message); +#if !NETSTANDARD + if (EventProvider.GetLastWriteEventError() == EventProvider.WriteEventErrorCode.EventTooBig) + { + foreach (String messagePart in SplitMessage(message)) + { + writeEvent(param0, param1, messagePart); + } + } +#endif + } + + [NonEvent] +#if !NETSTANDARD + private unsafe void WriteEvent( +#else + private new unsafe void WriteEvent( +#endif + Int32 eventId, + Int32 param0, + String param1) + { + param1 = param1 ?? String.Empty; + + Int32 eventDataCount = 2; + Byte* userData = stackalloc Byte[sizeof(EventData) * eventDataCount]; + EventData* eventData = (EventData*)userData; + + eventData[0].Size = sizeof(Int32); + eventData[1].Size = (Int32)(param1.Length + 1) * sizeof(Char); + + fixed (Char* a1 = param1) + { + eventData[0].DataPointer = (IntPtr)(¶m0); + eventData[1].DataPointer = (IntPtr)a1; + base.WriteEventCore(eventId, eventDataCount, eventData); + } + } + + [NonEvent] + private unsafe void WriteEvent( + Int32 eventId, + Int32 param0, + String param1, + String param2) + { + param1 = param1 ?? String.Empty; + param2 = param2 ?? String.Empty; + + Int32 eventDataCount = 3; + Byte* userData = stackalloc Byte[sizeof(EventData) * eventDataCount]; + EventData* eventData = (EventData*)userData; + + eventData[0].Size = sizeof(Int32); + eventData[1].Size = (Int32)(param1.Length + 1) * sizeof(Char); + eventData[2].Size = (Int32)(param2.Length + 1) * sizeof(Char); + + fixed (Char* a1 = param1, a2 = param2) + { + eventData[0].DataPointer = (IntPtr)(¶m0); + eventData[1].DataPointer = (IntPtr)a1; + eventData[2].DataPointer = (IntPtr)a2; + base.WriteEventCore(eventId, eventDataCount, eventData); + } + } + + [NonEvent] + private unsafe void WriteEvent( + Int32 eventId, + Int32 param0, + Int32 param1, + Guid param2) + { + Int32 eventDataCount = 3; + + Byte* userData = stackalloc Byte[sizeof(EventData) * eventDataCount]; + EventData* eventData = (EventData*)userData; + + eventData[0].Size = sizeof(Int32); + eventData[1].Size = sizeof(Int32); + eventData[2].Size = sizeof(Guid); + eventData[0].DataPointer = (IntPtr)(¶m0); + eventData[1].DataPointer = (IntPtr)(¶m1); + eventData[2].DataPointer = (IntPtr)(¶m2); + base.WriteEventCore(eventId, eventDataCount, eventData); + } + + [NonEvent] + private unsafe void WriteEvent( + Int32 eventId, + Int32 param0, + Int32 param1, + String param2) + { + param2 = param2 ?? 
String.Empty; + + Int32 eventDataCount = 3; + + Byte* userData = stackalloc Byte[sizeof(EventData) * eventDataCount]; + EventData* eventData = (EventData*)userData; + + eventData[0].Size = sizeof(Int32); + eventData[1].Size = sizeof(Int32); + eventData[2].Size = (Int32)(param2.Length + 1) * sizeof(Char); + fixed (Char* a2 = param2) + { + eventData[0].DataPointer = (IntPtr)(¶m0); + eventData[1].DataPointer = (IntPtr)(¶m1); + eventData[2].DataPointer = (IntPtr)a2; + base.WriteEventCore(eventId, eventDataCount, eventData); + } + } + + [NonEvent] + private unsafe void WriteEvent( + Int32 eventId, + Int32 param0, + String param1, + Int32 param2) + { + param1 = param1 ?? String.Empty; + + Int32 eventDataCount = 3; + + Byte* userData = stackalloc Byte[sizeof(EventData) * eventDataCount]; + EventData* eventData = (EventData*)userData; + + eventData[0].Size = sizeof(Int32); + eventData[1].Size = (Int32)(param1.Length + 1) * sizeof(Char); + eventData[2].Size = sizeof(Int32); + fixed (Char* a1 = param1) + { + eventData[0].DataPointer = (IntPtr)(¶m0); + eventData[1].DataPointer = (IntPtr)a1; + eventData[2].DataPointer = (IntPtr)(¶m2); + base.WriteEventCore(eventId, eventDataCount, eventData); + } + } + + [NonEvent] + private unsafe void WriteEvent( + Int32 eventId, + Int32 param0, + Int32 param1, + Int32 param2, + Guid param3) + { + Int32 eventDataCount = 4; + + Byte* userData = stackalloc Byte[sizeof(EventData) * eventDataCount]; + EventData* eventData = (EventData*)userData; + + eventData[0].Size = sizeof(Int32); + eventData[1].Size = sizeof(Int32); + eventData[2].Size = sizeof(Int32); + eventData[3].Size = sizeof(Guid); + eventData[0].DataPointer = (IntPtr)(¶m0); + eventData[1].DataPointer = (IntPtr)(¶m1); + eventData[2].DataPointer = (IntPtr)(¶m2); + eventData[3].DataPointer = (IntPtr)(¶m3); + base.WriteEventCore(eventId, eventDataCount, eventData); + } + + [NonEvent] + private unsafe void WriteEvent( + Int32 eventId, + Int32 param0, + Int32 param1, + Guid param2, + Guid param3) + { + Int32 eventDataCount = 4; + + Byte* userData = stackalloc Byte[sizeof(EventData) * eventDataCount]; + EventData* eventData = (EventData*)userData; + + eventData[0].Size = sizeof(Int32); + eventData[1].Size = sizeof(Int32); + eventData[2].Size = sizeof(Guid); + eventData[3].Size = sizeof(Guid); + eventData[0].DataPointer = (IntPtr)(¶m0); + eventData[1].DataPointer = (IntPtr)(¶m1); + eventData[2].DataPointer = (IntPtr)(¶m2); + eventData[3].DataPointer = (IntPtr)(¶m3); + base.WriteEventCore(eventId, eventDataCount, eventData); + } + + [NonEvent] + private unsafe void WriteEvent( + Int32 eventId, + Int32 param0, + String param1, + Int32 param2, + String param3) + { + param1 = param1 ?? String.Empty; + param3 = param3 ?? 
String.Empty; + + Int32 eventDataCount = 4; + + Byte* userData = stackalloc Byte[sizeof(EventData) * eventDataCount]; + EventData* eventData = (EventData*)userData; + + eventData[0].Size = sizeof(Int32); + eventData[1].Size = (Int32)(param1.Length + 1) * sizeof(Char); + eventData[2].Size = sizeof(Int32); + eventData[3].Size = (Int32)(param3.Length + 1) * sizeof(Char); + fixed (Char* a1 = param1, a3 = param3) + { + eventData[0].DataPointer = (IntPtr)(¶m0); + eventData[1].DataPointer = (IntPtr)a1; + eventData[2].DataPointer = (IntPtr)(¶m2); + eventData[3].DataPointer = (IntPtr)a3; + base.WriteEventCore(eventId, eventDataCount, eventData); + } + } + + [NonEvent] + private unsafe void WriteEvent( + Int32 eventId, + Int32 param0, + String param1, + Int32 param2, + String param3, + Double param4) + { + param1 = param1 ?? String.Empty; + param3 = param3 ?? String.Empty; + + Int32 eventDataCount = 5; + + Byte* userData = stackalloc Byte[sizeof(EventData) * eventDataCount]; + EventData* eventData = (EventData*)userData; + + eventData[0].Size = sizeof(Int32); + eventData[1].Size = (Int32)(param1.Length + 1) * sizeof(Char); + eventData[2].Size = sizeof(Int32); + eventData[3].Size = (Int32)(param3.Length + 1) * sizeof(Char); + eventData[4].Size = sizeof(Double); + fixed (Char* a1 = param1, a3 = param3) + { + eventData[0].DataPointer = (IntPtr)(¶m0); + eventData[1].DataPointer = (IntPtr)a1; + eventData[2].DataPointer = (IntPtr)(¶m2); + eventData[3].DataPointer = (IntPtr)a3; + eventData[4].DataPointer = (IntPtr)(¶m4); + base.WriteEventCore(eventId, eventDataCount, eventData); + } + } + + private static Lazy m_log = new Lazy(() => new VssHttpEventSource()); + } + + public static class VssEventSources + { + public const String Http = "Microsoft-VSS-Http"; + } +} diff --git a/src/Sdk/Common/Common/Diagnostics/VssHttpMethod.cs b/src/Sdk/Common/Common/Diagnostics/VssHttpMethod.cs new file mode 100644 index 00000000000..03f3fac1e10 --- /dev/null +++ b/src/Sdk/Common/Common/Diagnostics/VssHttpMethod.cs @@ -0,0 +1,15 @@ + +namespace GitHub.Services.Common.Diagnostics +{ + internal enum VssHttpMethod + { + UNKNOWN, + DELETE, + HEAD, + GET, + OPTIONS, + PATCH, + POST, + PUT, + } +} diff --git a/src/Sdk/Common/Common/Diagnostics/VssTraceActivity.cs b/src/Sdk/Common/Common/Diagnostics/VssTraceActivity.cs new file mode 100644 index 00000000000..b222f1a2f0a --- /dev/null +++ b/src/Sdk/Common/Common/Diagnostics/VssTraceActivity.cs @@ -0,0 +1,150 @@ +using System; +using System.Diagnostics; +#if !NETSTANDARD +using System.Runtime.Remoting.Messaging; +#endif +using System.Runtime.Serialization; + +namespace GitHub.Services.Common.Diagnostics +{ + /// + /// Represents a trace activity for correlating diagnostic traces together. + /// + [DataContract] + [Serializable] + public sealed class VssTraceActivity + { + private VssTraceActivity() + { + } + + private VssTraceActivity(Guid activityId) + { + this.Id = activityId; + } + + /// + /// Gets the unique identifier for the trace activity. + /// + [DataMember] + public Guid Id + { + get; + private set; + } + + /// + /// Gets the current trace activity if one is set on the current thread; otherwise, null. + /// + public static VssTraceActivity Current + { +#if !NETSTANDARD + get + { + return CallContext.LogicalGetData(VssTraceActivity.PropertyName) as VssTraceActivity; + } + private set + { + CallContext.LogicalSetData(VssTraceActivity.PropertyName, value); + } +#else + get + { + return null; + } + set { } +#endif + } + + /// + /// Gets the empty trace activity. 
+ /// + public static VssTraceActivity Empty + { + get + { + return s_empty.Value; + } + } + + /// + /// Creates a disposable trace scope in which the current trace activity is activated for trace correlation. + /// The call context state for is updated within the scope to reference + /// the activated activity. + /// + /// A trace scope for correlating multiple traces together + public IDisposable EnterCorrelationScope() + { + return new CorrelationScope(this); + } + + /// + /// Gets the current activity or, if no activity is active on the current thread, creates a new activity for + /// trace correlation. + /// + /// The current trace activity or a new trace activity + public static VssTraceActivity GetOrCreate() + { + if (VssTraceActivity.Current != null) + { + return VssTraceActivity.Current; + } + else if (Trace.CorrelationManager.ActivityId == Guid.Empty) + { + return new VssTraceActivity(Guid.NewGuid()); + } + else + { + return new VssTraceActivity(Trace.CorrelationManager.ActivityId); + } + } + + /// + /// Creates a new trace activity optionally using the provided identifier. + /// + /// The activity identifier or none to have one generated + /// A new trace activity instance + public static VssTraceActivity New(Guid activityId = default(Guid)) + { + return new VssTraceActivity(activityId == default(Guid) ? Guid.NewGuid() : activityId); + } + + /// + /// Gets the property name used to cache this object on extensible objects. + /// + public const String PropertyName = "MS.VSS.Diagnostics.TraceActivity"; + private static Lazy s_empty = new Lazy(() => new VssTraceActivity(Guid.Empty)); + + private sealed class CorrelationScope : IDisposable + { + public CorrelationScope(VssTraceActivity activity) + { + m_previousActivity = VssTraceActivity.Current; + if (m_previousActivity == null || m_previousActivity.Id != activity.Id) + { + m_swap = true; + VssTraceActivity.Current = activity; + } + } + + public void Dispose() + { + if (m_swap) + { + try + { + m_swap = false; + } + finally + { + // Perform in a finally block to ensure consistency between the two variables + VssTraceActivity.Current = m_previousActivity; + } + } + } + + private Boolean m_swap; + private VssTraceActivity m_previousActivity; + } + } +} diff --git a/src/Sdk/Common/Common/ExceptionMappingAttribute.cs b/src/Sdk/Common/Common/ExceptionMappingAttribute.cs new file mode 100644 index 00000000000..6c64e5828a5 --- /dev/null +++ b/src/Sdk/Common/Common/ExceptionMappingAttribute.cs @@ -0,0 +1,54 @@ +using System; +using System.ComponentModel; + +namespace GitHub.Services.Common +{ + /// + /// Matches Exception Types to back compatible TypeName and TypeKey for the specified range + /// of REST Api versions. This allows the current server to send back compatible typename + /// and type key json when talking to older clients. It also allows current clients to translate + /// exceptions returned from older servers to a current client's exception type. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [AttributeUsage(AttributeTargets.Class, AllowMultiple = true, Inherited = false)] + public class ExceptionMappingAttribute : Attribute + { + /// + /// Matches Exception Types to back compatible TypeName and TypeKey for the specified range + /// of REST Api versions. This allows the current server to send back compatible typename + /// and type key json when talking to older clients. It also allows current clients to translate + /// exceptions returned from older servers to a current client's exception type. 
+ /// + /// The inclusive minimum REST Api version for this mapping. + /// The exclusive maximum REST Api version for this mapping. + /// The original typekey to be returned by the server when processing requests within the REST Api range specified. + /// The original typeName to be returned by the server when processing requests within the REST Api range specified. + public ExceptionMappingAttribute(string minApiVersion, string exclusiveMaxApiVersion, string typeKey, string typeName) + { + MinApiVersion = new Version(minApiVersion); + ExclusiveMaxApiVersion = new Version(exclusiveMaxApiVersion); + TypeKey = typeKey; + TypeName = typeName; + } + + /// + /// The inclusive minimum REST Api version for this mapping. + /// + public Version MinApiVersion { get; private set; } + + /// + /// The exclusive maximum REST Api version for this mapping. + /// + public Version ExclusiveMaxApiVersion { get; private set; } + + /// + /// The original typekey to be returned by the server when processing requests within the REST Api range specified. + /// + public string TypeKey { get; private set; } + + /// + /// The original typeName to be returned by the server when processing requests within the REST Api range specified. + /// + public string TypeName { get; private set; } + } +} diff --git a/src/Sdk/Common/Common/Exceptions/AuthenticationExceptions.cs b/src/Sdk/Common/Common/Exceptions/AuthenticationExceptions.cs new file mode 100644 index 00000000000..48e85fa3b36 --- /dev/null +++ b/src/Sdk/Common/Common/Exceptions/AuthenticationExceptions.cs @@ -0,0 +1,58 @@ +using GitHub.Services.Common.Internal; +using System; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.Serialization; + +namespace GitHub.Services.Common +{ + [Serializable] + [SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")] + [ExceptionMapping("0.0", "3.0", "VssAuthenticationException", "GitHub.Services.Common.VssAuthenticationException, GitHub.Services.Common, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VssAuthenticationException : VssException + { + public VssAuthenticationException() + { + } + + public VssAuthenticationException(String message) + : base(message) + { + } + + public VssAuthenticationException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected VssAuthenticationException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")] + [ExceptionMapping("0.0", "3.0", "VssUnauthorizedException", "GitHub.Services.Common.VssUnauthorizedException, GitHub.Services.Common, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VssUnauthorizedException : VssException + { + public VssUnauthorizedException() + : this(CommonResources.VssUnauthorizedUnknownServer()) + { + } + + public VssUnauthorizedException(String message) + : base(message) + { + } + + public VssUnauthorizedException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected VssUnauthorizedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } +} diff --git a/src/Sdk/Common/Common/Exceptions/CommonExceptions.cs b/src/Sdk/Common/Common/Exceptions/CommonExceptions.cs new file mode 100644 index 00000000000..7960517a0b2 --- /dev/null +++ b/src/Sdk/Common/Common/Exceptions/CommonExceptions.cs @@ -0,0 
+1,70 @@ +using System; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.Serialization; + +namespace GitHub.Services.Common +{ + /// + /// Thrown when a config file fails to load + /// + /// Initializes an exception from serialized data + /// + /// object holding the serialized data + /// context info about the source or destination + protected VssServiceException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + /// + /// Gets the type name and key for serialization of this exception. + /// If not provided, the serializer will provide default values. + /// + public virtual void GetTypeNameAndKey(Version restApiVersion, out String typeName, out String typeKey) + { + GetTypeNameAndKeyForExceptionType(GetType(), restApiVersion, out typeName, out typeKey); + } + } +} diff --git a/src/Sdk/Common/Common/Exceptions/PropertyExceptions.cs b/src/Sdk/Common/Common/Exceptions/PropertyExceptions.cs new file mode 100644 index 00000000000..34c97b73a1d --- /dev/null +++ b/src/Sdk/Common/Common/Exceptions/PropertyExceptions.cs @@ -0,0 +1,63 @@ +using GitHub.Services.Common.Internal; +using System; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.Serialization; +using System.Security; + +namespace GitHub.Services.Common +{ + /// + /// Thrown when validating user input. Similar to ArgumentException but doesn't require the property to be an input parameter. + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "VssPropertyValidationException", "GitHub.Services.Common.VssPropertyValidationException, GitHub.Services.Common, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VssPropertyValidationException : VssServiceException + { + public VssPropertyValidationException(String propertyName, String message) + : base(message) + { + PropertyName = propertyName; + } + + public VssPropertyValidationException(String propertyName, String message, Exception innerException) + : base(message, innerException) + { + PropertyName = propertyName; + } + + protected VssPropertyValidationException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + PropertyName = info.GetString("PropertyName"); + } + + public String PropertyName { get; set; } + + [SecurityCritical] + public override void GetObjectData(SerializationInfo info, StreamingContext context) + { + base.GetObjectData(info, context); + info.AddValue("PropertyName", PropertyName); + } + } + + /// + /// PropertyTypeNotSupportedException - this is thrown when a type is DBNull or an Object type other than a Byte array. 
+ /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "PropertyTypeNotSupportedException", "GitHub.Services.Common.PropertyTypeNotSupportedException, GitHub.Services.Common, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class PropertyTypeNotSupportedException : VssPropertyValidationException + { + public PropertyTypeNotSupportedException(String propertyName, Type type) + : base(propertyName, CommonResources.VssUnsupportedPropertyValueType(propertyName, type.FullName)) + { + } + + protected PropertyTypeNotSupportedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } +} diff --git a/src/Sdk/Common/Common/GenerateConstantAttributes.cs b/src/Sdk/Common/Common/GenerateConstantAttributes.cs new file mode 100644 index 00000000000..a976d6cddcd --- /dev/null +++ b/src/Sdk/Common/Common/GenerateConstantAttributes.cs @@ -0,0 +1,98 @@ +// Microsoft Confidential +// Copyright (c) Microsoft Corporation. All rights reserved. + +using System; +using System.ComponentModel; + +namespace GitHub.Services.Common +{ + /// + /// Base class for constant generation. Allows types/fields to be generated + /// with an alternate name. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class GenerateConstantAttributeBase : Attribute + { + protected GenerateConstantAttributeBase(string alternateName = null) + { + AlternateName = alternateName; + } + + public string AlternateName { get; private set; } + } + + /// + /// Can be applied to a const/readonly-static field of a class/enum/struct, but is + /// only used when the containing type has the 'GenerateSpecificConstants' attribute applied. + /// This allows the developer to specify exactly what constants to include out of the containing type. + /// + [AttributeUsage(AttributeTargets.Field)] + [EditorBrowsable(EditorBrowsableState.Never)] + public class GenerateConstantAttribute : GenerateConstantAttributeBase + { + public GenerateConstantAttribute(string alternateName = null) + : base(alternateName) + { + } + } + + /// + /// Applied to any enum/class/struct. Causes the constants generator to create javascript constants + /// for all const/readonly-static fields contained by the type. + /// + [AttributeUsage(AttributeTargets.Class | AttributeTargets.Enum | AttributeTargets.Struct)] + [EditorBrowsable(EditorBrowsableState.Never)] + public class GenerateAllConstantsAttribute : GenerateConstantAttribute + { + public GenerateAllConstantsAttribute(string alternateName = null) + : base(alternateName) + { + } + } + + /// + /// Applied to any enum/class/struct. Causes the constants generator to create javascript constants at runtime + /// for the type for any member constants/enumerated values that are tagged with the 'GenerateConstant' attribute. + /// + [AttributeUsage(AttributeTargets.Class | AttributeTargets.Enum | AttributeTargets.Struct)] + [EditorBrowsable(EditorBrowsableState.Never)] + public class GenerateSpecificConstantsAttribute : GenerateConstantAttribute + { + public GenerateSpecificConstantsAttribute(string alternateName = null) + : base(alternateName) + { + } + } + + /// + /// Applied to a class that represents a data model which is serialized to javascript. + /// This attribute controls how TypeScript interfaces are generated for the class that + /// this is applied to. 
+ /// + [AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Enum | AttributeTargets.Interface)] + [EditorBrowsable(EditorBrowsableState.Never)] + public class GenerateInterfaceAttribute : GenerateConstantAttributeBase + { + public GenerateInterfaceAttribute() + : this(true) + { + } + + public GenerateInterfaceAttribute(string alternateName) + : base(alternateName) + { + GenerateInterface = true; + } + + public GenerateInterfaceAttribute(bool generateInterface) + : base() + { + GenerateInterface = generateInterface; + } + + /// + /// Whether or not to generate a typescript interface for this type + /// + public bool GenerateInterface { get; set; } + } +} diff --git a/src/Sdk/Common/Common/IVssClientCertificateManager.cs b/src/Sdk/Common/Common/IVssClientCertificateManager.cs new file mode 100644 index 00000000000..feb32580c48 --- /dev/null +++ b/src/Sdk/Common/Common/IVssClientCertificateManager.cs @@ -0,0 +1,13 @@ +using System.Security.Cryptography.X509Certificates; + +namespace GitHub.Services.Common +{ + /// + /// An interface to allow custom implementations to + /// gather client certificates when necessary. + /// + public interface IVssClientCertificateManager + { + X509Certificate2Collection ClientCertificates { get; } + } +} diff --git a/src/Sdk/Common/Common/IVssHttpRetryInfo.cs b/src/Sdk/Common/Common/IVssHttpRetryInfo.cs new file mode 100644 index 00000000000..4272ca722e1 --- /dev/null +++ b/src/Sdk/Common/Common/IVssHttpRetryInfo.cs @@ -0,0 +1,19 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Threading.Tasks; + +namespace GitHub.Services.Common +{ + public interface IVssHttpRetryInfo + { + void InitialAttempt(HttpRequestMessage request); + + void Retry(TimeSpan sleep); + + void Reset(); + } +} diff --git a/src/Sdk/Common/Common/Performance/PerformanceTimerConstants.cs b/src/Sdk/Common/Common/Performance/PerformanceTimerConstants.cs new file mode 100644 index 00000000000..6ee1fde3f11 --- /dev/null +++ b/src/Sdk/Common/Common/Performance/PerformanceTimerConstants.cs @@ -0,0 +1,26 @@ +using System; + +namespace GitHub.Services.Common +{ + public static class PerformanceTimerConstants + { + public const string Header = "X-VSS-PerfData"; + public const string PerfTimingKey = "PerformanceTimings"; + + [Obsolete] + public const string Aad = "AAD"; // Previous timer, broken into Token and Graph below + + public const string AadToken = "AadToken"; + public const string AadGraph = "AadGraph"; + public const string BlobStorage = "BlobStorage"; + public const string FinalSqlCommand = "FinalSQLCommand"; + public const string Redis = "Redis"; + public const string ServiceBus = "ServiceBus"; + public const string Sql = "SQL"; + public const string SqlReadOnly = "SQLReadOnly"; + public const string SqlRetries = "SQLRetries"; + public const string TableStorage = "TableStorage"; + public const string VssClient = "VssClient"; + public const string DocumentDB = "DocumentDB"; + } +} diff --git a/src/Sdk/Common/Common/Performance/PerformanceTimingGroup.cs b/src/Sdk/Common/Common/Performance/PerformanceTimingGroup.cs new file mode 100644 index 00000000000..3c87957065c --- /dev/null +++ b/src/Sdk/Common/Common/Performance/PerformanceTimingGroup.cs @@ -0,0 +1,61 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Services.Common +{ + /// + /// A set of performance timings all keyed off of the same string + /// + 
[DataContract] + public class PerformanceTimingGroup + { + public PerformanceTimingGroup() + { + this.Timings = new List<PerformanceTimingEntry>(); + } + + /// + /// Overall duration of all entries in this group in ticks + /// + [DataMember(EmitDefaultValue = false)] + public long ElapsedTicks { get; set; } + + /// + /// The total number of timing entries associated with this group + /// + [DataMember(EmitDefaultValue = false)] + public int Count { get; set; } + + /// + /// A list of timing entries in this group. Only the first few entries in each group are collected. + /// + [DataMember] + public List<PerformanceTimingEntry> Timings { get; private set; } + } + + /// + /// A single timing consisting of a duration and start time + /// + [DataContract] + public struct PerformanceTimingEntry + { + /// + /// Duration of the entry in ticks + /// + [DataMember] + public long ElapsedTicks { get; set; } + + /// + /// Offset from Server Request Context start time in microseconds + /// + [DataMember] + public long StartOffset { get; set; } + + /// + /// Properties to distinguish timings within the same group or to provide data to send with telemetry + /// + [DataMember(EmitDefaultValue = false)] + public IDictionary<string, string> Properties { get; set; } + } +} diff --git a/src/Sdk/Common/Common/TaskCancellationExtensions.cs b/src/Sdk/Common/Common/TaskCancellationExtensions.cs new file mode 100644 index 00000000000..9b9c0767fbd --- /dev/null +++ b/src/Sdk/Common/Common/TaskCancellationExtensions.cs @@ -0,0 +1,107 @@ +using System; +using System.Globalization; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; + +namespace GitHub.Services.Common +{ + public static class TaskCancellationExtensions + { + private struct Void { } + + /// + /// Some APIs (e.g. HttpClient) don't honor cancellation tokens. This wrapper adds an extra layer of cancellation checking. + /// + public static Task EnforceCancellation( + this Task task, + CancellationToken cancellationToken, + Func<string> makeMessage = null, + [CallerFilePath] string file = "", + [CallerMemberName] string member = "", + [CallerLineNumber] int line = -1) + { + Func<Task<Void>> task2 = async () => + { + await task.ConfigureAwait(false); + return new Void(); + }; + + return task2().EnforceCancellation(cancellationToken, makeMessage, file, member, line); + } + + /// + /// Some APIs (e.g. HttpClient) don't honor cancellation tokens. This wrapper adds an extra layer of cancellation checking. + /// + public static async Task<T> EnforceCancellation<T>( + this Task<T> task, + CancellationToken cancellationToken, + Func<string> makeMessage = null, + [CallerFilePath] string file = "", + [CallerMemberName] string member = "", + [CallerLineNumber] int line = -1) + { + ArgumentUtility.CheckForNull(task, nameof(task)); + + // IsCompleted will return true when the task is in one of the three final states: RanToCompletion, Faulted, or Canceled. + if (task.IsCompleted) + { + return await task; + } + + var cancellationTcs = new TaskCompletionSource<bool>(RUN_CONTINUATIONS_ASYNCHRONOUSLY); + using (cancellationToken.Register(() => cancellationTcs.SetResult(false))) + { + var completedTask = await Task.WhenAny(task, cancellationTcs.Task).ConfigureAwait(false); + if (completedTask == task) + { + return await task; + } + } + + // Even if our actual task actually did honor the cancellation token, there's still a race that our WaitForCancellation + // task may have handled the cancellation more quickly.
+ if (!cancellationToken.IsCancellationRequested) + { + throw new InvalidOperationException("Task ended but cancellation token is not marked for cancellation."); + } + + // However, we'd ideally like to throw the cancellation exception from the original task if we can. + // Thus, we'll give that task a few seconds to coalesce (e.g. write to a log) before we give up on it. + int seconds = 3; + var lastChanceTcs = new TaskCompletionSource<bool>(RUN_CONTINUATIONS_ASYNCHRONOUSLY); + using (var lastChanceTimer = new CancellationTokenSource(TimeSpan.FromSeconds(seconds))) + using (lastChanceTimer.Token.Register(() => lastChanceTcs.SetResult(false))) + { + var completedTask = await Task.WhenAny(task, lastChanceTcs.Task).ConfigureAwait(false); + if (completedTask == task) + { + return await task; + } + } + + // At this point, we've given up on waiting for this task. + ObserveExceptionIfNeeded(task); + + string errorString = $"Task in function {member} at {file}:{line} was still active {seconds} seconds after operation was cancelled."; + if (makeMessage != null) + { + errorString += $" {makeMessage()}"; + } + + throw new OperationCanceledException(errorString, cancellationToken); + } + + private static void ObserveExceptionIfNeeded(Task task) + { + task.ContinueWith(t => t.Exception, TaskContinuationOptions.OnlyOnFaulted); + } + + /// + /// This is a flag exposed by TaskCreationOptions and TaskContinuationOptions but it's not in .Net 4.5 + /// In Azure we have latest .Net loaded which will consume this flag. + /// Client environments using earlier .Net would ignore it. + /// + private const int RUN_CONTINUATIONS_ASYNCHRONOUSLY = 0x40; + } +} diff --git a/src/Sdk/Common/Common/Utility/ArgumentUtility.cs b/src/Sdk/Common/Common/Utility/ArgumentUtility.cs new file mode 100644 index 00000000000..55500639355 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/ArgumentUtility.cs @@ -0,0 +1,1248 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Globalization; +using System.Runtime.CompilerServices; +using System.Text.RegularExpressions; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.Common +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class ArgumentUtility + { + /// + /// Throw an exception if the object is null. + /// + /// the object to check + /// the variable or parameter name to display + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForNull(Object var, String varName) + { + CheckForNull(var, varName, null); + } + + /// + /// Throw an exception if the object is null. + /// + /// the object to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. See + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForNull(Object var, String varName, String expectedServiceArea) + { + if (var == null) + { + throw new ArgumentNullException(varName).Expected(expectedServiceArea); + } + } + + /// + /// Throw an exception if a string is null or empty. + /// + /// string to check + /// the variable or parameter name to display + public static void CheckStringForNullOrEmpty(String stringVar, String stringVarName) + { + CheckStringForNullOrEmpty(stringVar, stringVarName, null); + } + + /// + /// Throw an exception if a string is null or empty.
+ /// + /// string to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. See + public static void CheckStringForNullOrEmpty(String stringVar, String stringVarName, String expectedServiceArea) + { + CheckStringForNullOrEmpty(stringVar, stringVarName, false, expectedServiceArea); + } + + public static void CheckForNonnegativeInt(int var, String varName) + { + CheckForNonnegativeInt(var, varName, null); + } + + public static void CheckForNonnegativeInt(int var, String varName, String expectedServiceArea) + { + if (var < 0) + { + throw new ArgumentOutOfRangeException(varName).Expected(expectedServiceArea); + } + } + + /// + /// Throws and exception if an integer is less than 1 + /// + /// integer to check + /// the variable or parameter name to display + public static void CheckForNonPositiveInt(int var, String varName) + { + CheckForNonPositiveInt(var, varName, null); + } + + /// + /// Throws and exception if an integer is less than 1 + /// + /// integer to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. See + public static void CheckForNonPositiveInt(int var, String varName, String expectedServiceArea) + { + if (var <= 0) + { + throw new ArgumentOutOfRangeException(varName).Expected(expectedServiceArea); + } + } + + /// + /// Throw an exception if a string is null or empty. + /// + /// string to check + /// the variable or parameter name to display + /// If true, will trim the string after it is determined not to be null + public static void CheckStringForNullOrEmpty(String stringVar, String stringVarName, bool trim) + { + CheckStringForNullOrEmpty(stringVar, stringVarName, trim, null); + } + + /// + /// Throw an exception if a string is null or empty. + /// + /// string to check + /// the variable or parameter name to display + /// If true, will trim the string after it is determined not to be null + /// the Service Area where this exception is expected due to user input. See + public static void CheckStringForNullOrEmpty(String stringVar, String stringVarName, bool trim, String expectedServiceArea) + { + CheckForNull(stringVar, stringVarName, expectedServiceArea); + if (trim == true) + { + stringVar = stringVar.Trim(); + } + if (stringVar.Length == 0) + { + throw new ArgumentException(CommonResources.EmptyStringNotAllowed(), stringVarName).Expected(expectedServiceArea); + } + } + + /// + /// Throw an exception if a string is null, too short, or too long. + /// + /// string to check + /// the variable or parameter name to display + /// Maximum allowed string length + /// Minimum allowed string length + /// the Service Area where this exception is expected due to user input. 
See + public static void CheckStringLength( + string stringVar, + string stringVarName, + int maxLength, + int minLength = 0, + string expectedServiceArea = null) + { + CheckForNull(stringVar, stringVarName, expectedServiceArea); + + if (stringVar.Length < minLength || stringVar.Length > maxLength) + { + throw new ArgumentException( + CommonResources.StringLengthNotAllowed(stringVarName, minLength, maxLength), + stringVarName) + .Expected(expectedServiceArea); + } + } + + /// + /// Check a Collection for the Max Length + /// + /// enumerable to check + /// the variable or parameter name to display + /// Max allowed Length + public static void CheckCollectionForMaxLength(ICollection collection, string collectionName, int maxLength) + { + if (collection?.Count > maxLength) + { + throw new ArgumentException(CommonResources.CollectionSizeLimitExceeded(collectionName, maxLength)); + } + } + + /// + /// Throw an exception if IEnumerable is null or empty. + /// + /// enumerable to check + /// the variable or parameter name to display + public static void CheckEnumerableForNullOrEmpty(IEnumerable enumerable, String enumerableName) + { + CheckEnumerableForNullOrEmpty(enumerable, enumerableName, null); + } + + /// + /// Throw an exception if IEnumerable is null or empty. + /// + /// enumerable to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. See + public static void CheckEnumerableForNullOrEmpty(IEnumerable enumerable, String enumerableName, String expectedServiceArea) + { + CheckForNull(enumerable, enumerableName, expectedServiceArea); + + IEnumerator enumerator = enumerable.GetEnumerator(); + if (!enumerator.MoveNext()) + { + throw new ArgumentException(CommonResources.EmptyCollectionNotAllowed(), enumerableName).Expected(expectedServiceArea); + } + } + + /// + /// Throw an exception if IEnumerable contains a null element. + /// + /// enumerable to check + /// the variable or parameter name to display + public static void CheckEnumerableForNullElement(IEnumerable enumerable, String enumerableName) + { + CheckEnumerableForNullElement(enumerable, enumerableName, null); + } + + /// + /// Throw an exception if IEnumerable contains a null element. + /// + /// enumerable to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. See + public static void CheckEnumerableForNullElement(IEnumerable enumerable, String enumerableName, String expectedServiceArea) + { + CheckForNull(enumerable, enumerableName, expectedServiceArea); + + IEnumerator enumerator = enumerable.GetEnumerator(); + while (enumerator.MoveNext()) + { + if (enumerator.Current == null) + { + throw new ArgumentException(CommonResources.NullElementNotAllowedInCollection(), enumerableName).Expected(expectedServiceArea); + } + } + } + + /// + /// Throw an exception if the guid is equal to Guid.Empty. + /// + /// the guid to check + /// the variable or parameter name to display + public static void CheckForEmptyGuid(Guid guid, String varName) + { + CheckForEmptyGuid(guid, varName, null); + } + + /// + /// Throw an exception if the guid is equal to Guid.Empty. + /// + /// the guid to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. 
See + public static void CheckForEmptyGuid(Guid guid, String varName, String expectedServiceArea) + { + if (guid.Equals(Guid.Empty)) + { + throw new ArgumentException(CommonResources.EmptyGuidNotAllowed(varName), varName).Expected(expectedServiceArea); + } + } + + /// + /// Throw an exception if the value contains more than one bit set. + /// + /// the value to check + /// the variable or parameter name to display + public static void CheckForMultipleBits(int value, String varName) + { + CheckForMultipleBits(value, varName, null); + } + + /// + /// Throw an exception if the value contains more than one bit set. + /// + /// the value to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. See + public static void CheckForMultipleBits(int value, String varName, String expectedServiceArea) + { + if (0 == value || + (value & (value - 1)) != 0) + { + throw new ArgumentException(CommonResources.SingleBitRequired(varName), varName).Expected(expectedServiceArea); + } + } + + /// + /// Throw an exception if the value equals the default for the type. + /// + /// the value to check + /// the variable or parameter name to display + public static void CheckForDefault(T value, String varName) + { + if (EqualityComparer.Default.Equals(value, default(T))) + { + throw new ArgumentException(CommonResources.DefaultValueNotAllowed(varName), varName); + } + } + + /// + /// Checks if character is not displayable. + /// + /// + /// Carriage return and line-feed is considered legal if the allowCrLf parameter is set to true. + /// A character is "not displayable" if it's UnicodeCategory is in the set {LineSeparator, ParagraphSeparator, Control, Format, OtherNotAssigned}. + public static bool IsIllegalInputCharacter(char c, Boolean allowCrLf = false) + { + if (allowCrLf && (c == '\r' || c == '\n')) + { + return false; + } + + UnicodeCategory cat = Char.GetUnicodeCategory(c); + + // see http://www.w3.org/TR/REC-xml/#charsets + return (cat == UnicodeCategory.LineSeparator + || cat == UnicodeCategory.ParagraphSeparator + || cat == UnicodeCategory.Control + || cat == UnicodeCategory.Format + || cat == UnicodeCategory.OtherNotAssigned); + } + + /// + /// Replace illegal characters with specified character. A character is considered illegal as per definition of + /// + public static string ReplaceIllegalCharacters(string str, char replaceWith, bool allowCrLf = false) + { + if (IsIllegalInputCharacter(replaceWith, allowCrLf)) + { + throw new ArgumentException(CommonResources.VssInvalidUnicodeCharacter((int)replaceWith), nameof(replaceWith)); + } + + if (string.IsNullOrEmpty(str)) + { + return str; + } + + char[] strArray = str.ToCharArray(); + for (int i = 0; i < strArray.Length; i++) + { + if (IsIllegalInputCharacter(strArray[i], allowCrLf: allowCrLf)) + { + strArray[i] = replaceWith; + } + } + + return new string(strArray); + } + + /// + /// Checks for invalid unicode characters + /// + /// + /// + public static void CheckStringForInvalidCharacters(String stringVar, String stringVarName) + { + CheckStringForInvalidCharacters(stringVar, stringVarName, false, null); + } + + /// + /// Checks for invalid unicode characters + /// + /// + /// + /// the Service Area where this exception is expected due to user input. 
See + public static void CheckStringForInvalidCharacters(String stringVar, String stringVarName, String expectedServiceArea) + { + CheckStringForInvalidCharacters(stringVar, stringVarName, false, expectedServiceArea); + } + + /// + /// Checks for invalid unicode characters + /// + /// + /// + /// + public static void CheckStringForInvalidCharacters(String stringVar, String stringVarName, Boolean allowCrLf) + { + CheckStringForInvalidCharacters(stringVar, stringVarName, allowCrLf, null); + } + + /// + /// Checks for invalid unicode characters + /// + /// + /// + /// + /// the Service Area where this exception is expected due to user input. See + public static void CheckStringForInvalidCharacters(String stringVar, String stringVarName, Boolean allowCrLf, String expectedServiceArea) + { + Debug.Assert(!String.IsNullOrEmpty(stringVarName), "!String.IsNullOrEmpty(stringVarName)"); + + ArgumentUtility.CheckForNull(stringVar, stringVarName); + + for (int i = 0; i < stringVar.Length; i++) + { + if (IsIllegalInputCharacter(stringVar[i], allowCrLf)) + { + throw new ArgumentException(CommonResources.VssInvalidUnicodeCharacter((int)stringVar[i]), stringVarName).Expected(expectedServiceArea); + } + } + } + + /// + /// + /// + /// + /// + /// + public static void CheckStringForInvalidCharacters(String stringVar, String stringVarName, Char[] invalidCharacters) + { + CheckStringForInvalidCharacters(stringVar, stringVarName, invalidCharacters, null); + } + + /// + /// + /// + /// + /// + /// + /// the Service Area where this exception is expected due to user input. See + public static void CheckStringForInvalidCharacters(String stringVar, String stringVarName, Char[] invalidCharacters, String expectedServiceArea) + { + Debug.Assert(null != stringVar, "null != stringVar"); + Debug.Assert(!String.IsNullOrEmpty(stringVarName), "!String.IsNullOrEmpty(stringVarName)"); + Debug.Assert(invalidCharacters != null, "invalidCharacters != null"); + + ArgumentUtility.CheckForNull(stringVar, stringVarName); + + for (int i = 0; i < invalidCharacters.Length; i++) + { + if (stringVar.IndexOf(invalidCharacters[i]) >= 0) + { + throw new ArgumentException(CommonResources.StringContainsInvalidCharacters(invalidCharacters[i]), stringVarName).Expected(expectedServiceArea); + } + } + } + + /// + /// Checks for escape sequences that are invalid in SQL + /// + /// The value to be checked + /// The name of the value to be checked + /// The service area where this exception is expected due to user input. See + public static void CheckStringForInvalidSqlEscapeCharacters(String stringVar, String stringVarName, String expectedServiceArea = null) + { + Debug.Assert(!String.IsNullOrEmpty(stringVar), "null != stringVar"); + Debug.Assert(!String.IsNullOrEmpty(stringVarName), "!String.IsNullOrEmpty(stringVarName)"); + + ArgumentUtility.CheckStringForNullOrEmpty(stringVar, stringVarName); + + for (int i = 0; i < stringVar.Length - 1; i++) + { + if (stringVar[i] == '\\') + { + // Make sure the next character after the slash is a valid escape character + char escapedCharacter = stringVar[++i]; + if (escapedCharacter != '*' && escapedCharacter != '?' 
&& escapedCharacter != '\\') + { + throw new ArgumentException(CommonResources.StringContainsInvalidCharacters('\\'), stringVarName).Expected(expectedServiceArea); + } + } + } + } + + public static void CheckBoundsInclusive(Int32 value, Int32 minValue, Int32 maxValue, String varName) + { + CheckBoundsInclusive(value, minValue, maxValue, varName, null); + } + + public static void CheckBoundsInclusive(Int32 value, Int32 minValue, Int32 maxValue, String varName, String expectedServiceArea) + { + Debug.Assert(!String.IsNullOrEmpty(varName), "!String.IsNullOrEmpty(stringVarName)"); + + if (value < minValue || value > maxValue) + { + throw new ArgumentOutOfRangeException(varName, CommonResources.ValueOutOfRange(value, varName, minValue, maxValue)).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throw an exception if the value is out of range. + /// + /// The comparable type + /// the value to check + /// the variable or parameter name to display + /// minimum legal value + //******************************************************************************************** + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForOutOfRange(T var, string varName, T minimum) + where T : IComparable + { + CheckForOutOfRange(var, varName, minimum, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if the value is out of range. + /// + /// The comparable type + /// the value to check + /// the variable or parameter name to display + /// minimum legal value + /// the Service Area where this exception is expected due to user input. See + //******************************************************************************************** + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForOutOfRange(T var, string varName, T minimum, String expectedServiceArea) + where T : IComparable + { + ArgumentUtility.CheckForNull(var, varName, expectedServiceArea); + if (var.CompareTo(minimum) < 0) + { + throw new ArgumentOutOfRangeException(varName, var, CommonResources.OutOfRange(var)).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throw an exception if the value is out of range. + /// + /// The comparable type + /// the value to check + /// the variable or parameter name to display + /// minimum legal value + /// maximum legal value + //******************************************************************************************** + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForOutOfRange(T var, string varName, T minimum, T maximum) + where T : IComparable + { + CheckForOutOfRange(var, varName, minimum, maximum, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if the value is out of range. + /// + /// The comparable type + /// the value to check + /// the variable or parameter name to display + /// minimum legal value + /// maximum legal value + /// the Service Area where this exception is expected due to user input. 
See + //******************************************************************************************** + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForOutOfRange(T var, string varName, T minimum, T maximum, String expectedServiceArea) + where T : IComparable + { + CheckForNull(var, varName, expectedServiceArea); + if (var.CompareTo(minimum) < 0 || var.CompareTo(maximum) > 0) + { + throw new ArgumentOutOfRangeException(varName, var, CommonResources.OutOfRange(var)).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throw an exception if the integer is out of range. + /// + /// the int to check + /// the variable or parameter name to display + /// minimum legal value + //******************************************************************************************** + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForOutOfRange(int var, String varName, int minimum) + { + CheckForOutOfRange(var, varName, minimum, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if the integer is out of range. + /// + /// the int to check + /// the variable or parameter name to display + /// minimum legal value + /// the Service Area where this exception is expected due to user input. See + //******************************************************************************************** + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForOutOfRange(int var, String varName, int minimum, String expectedServiceArea) + { + if (var < minimum) + { + throw new ArgumentOutOfRangeException(varName, var, CommonResources.OutOfRange(var)).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throw an exception if the integer is out of range. + /// + /// the int to check + /// the variable or parameter name to display + /// minimum legal value + /// maximum legal value + //******************************************************************************************** + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForOutOfRange(int var, String varName, int minimum, int maximum) + { + CheckForOutOfRange(var, varName, minimum, maximum, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if the integer is out of range. + /// + /// the int to check + /// the variable or parameter name to display + /// minimum legal value + /// maximum legal value + /// the Service Area where this exception is expected due to user input. See + //******************************************************************************************** + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForOutOfRange(int var, String varName, int minimum, int maximum, String expectedServiceArea) + { + if (var < minimum || var > maximum) + { + throw new ArgumentOutOfRangeException(varName, var, CommonResources.OutOfRange(var)).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throw an exception if the integer is out of range. 
+ /// + /// the int to check + /// the variable or parameter name to display + /// minimum legal value + //******************************************************************************************** + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForOutOfRange(long var, String varName, long minimum) + { + CheckForOutOfRange(var, varName, minimum, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if the integer is out of range. + /// + /// the int to check + /// the variable or parameter name to display + /// minimum legal value + /// the Service Area where this exception is expected due to user input. See + //******************************************************************************************** + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForOutOfRange(long var, String varName, long minimum, String expectedServiceArea) + { + if (var < minimum) + { + throw new ArgumentOutOfRangeException(varName, var, CommonResources.OutOfRange(var)).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throw an exception if the integer is out of range. + /// + /// the int to check + /// the variable or parameter name to display + /// minimum legal value + /// maximum legal value + //******************************************************************************************** + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForOutOfRange(long var, String varName, long minimum, long maximum) + { + CheckForOutOfRange(var, varName, minimum, maximum, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if the integer is out of range. + /// + /// the int to check + /// the variable or parameter name to display + /// minimum legal value + /// maximum legal value + /// the Service Area where this exception is expected due to user input. See + //******************************************************************************************** + [MethodImpl(MethodImplOptions.AggressiveInlining)] + public static void CheckForOutOfRange(long var, String varName, long minimum, long maximum, String expectedServiceArea) + { + if (var < minimum || var > maximum) + { + throw new ArgumentOutOfRangeException(varName, var, CommonResources.OutOfRange(var)).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throw an exception if the date is not in the range. + /// + /// the DateTime to check + /// the variable or parameter name to display + /// minimum legal value + /// + //******************************************************************************************** + public static void CheckForDateTimeRange(DateTime var, String varName, DateTime minimum, DateTime maximum) + { + CheckForDateTimeRange(var, varName, minimum, maximum, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if the date is not in the range. + /// + /// the DateTime to check + /// the variable or parameter name to display + /// minimum legal value + /// + /// the Service Area where this exception is expected due to user input. 
See + //******************************************************************************************** + public static void CheckForDateTimeRange(DateTime var, String varName, DateTime minimum, DateTime maximum, String expectedServiceArea) + { + if (var < minimum || var > maximum) + { + throw new ArgumentOutOfRangeException(varName, var, CommonResources.OutOfRange(var)).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throws an exception if the provided value is less than zero. + /// + /// value to check + /// the variable or parameter name to display + //******************************************************************************************** + public static void CheckGreaterThanOrEqualToZero(float value, string valueName) + { + CheckGreaterThanOrEqualToZero(value, valueName, null); + } + + //******************************************************************************************** + /// + /// Throws an exception if the provided value is less than zero. + /// + /// value to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. See + //******************************************************************************************** + public static void CheckGreaterThanOrEqualToZero(float value, string valueName, String expectedServiceArea) + { + if (value < 0) + { + throw new ArgumentException(CommonResources.ValueMustBeGreaterThanZero(), valueName).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throws an exception if the provided value is less than or equal to zero. + /// + /// value to check + /// the variable or parameter name to display + //******************************************************************************************** + public static void CheckGreaterThanZero(float value, string valueName) + { + CheckGreaterThanZero(value, valueName, null); + } + + //******************************************************************************************** + /// + /// Throws an exception if the provided value is less than or equal to zero. + /// + /// value to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. See + //******************************************************************************************** + public static void CheckGreaterThanZero(float value, string valueName, String expectedServiceArea) + { + if (value <= 0) + { + throw new ArgumentException(CommonResources.ValueMustBeGreaterThanZero(), valueName).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throw an exception if the object is not null. + /// + /// the object to check + /// the variable or parameter name to display + //******************************************************************************************** + public static void EnsureIsNull(Object var, String varName) + { + EnsureIsNull(var, varName, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if the object is not null. + /// + /// the object to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. 
See + //******************************************************************************************** + public static void EnsureIsNull(Object var, String varName, String expectedServiceArea) + { + if (var != null) + { + throw new ArgumentException(CommonResources.NullValueNecessary(varName)).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throw an exception if the string is not entirely of a specified casing (lowercase, uppercase). + /// + /// The string to check. + /// The variable or parameter name to display. + /// Indicates whether the check should require + /// lowercase characters, as opposed to uppercase characters. + //******************************************************************************************** + public static void CheckStringCasing(String stringVar, String varName, Boolean checkForLowercase = true) + { + CheckStringCasing(stringVar, varName, checkForLowercase, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if the string is not entirely of a specified casing (lowercase, uppercase). + /// + /// The string to check. + /// The variable or parameter name to display. + /// Indicates whether the check should require + /// lowercase characters, as opposed to uppercase characters. + /// the Service Area where this exception is expected due to user input. See + //******************************************************************************************** + public static void CheckStringCasing(String stringVar, String varName, Boolean checkForLowercase = true, String expectedServiceArea = null) + { + foreach (Char c in stringVar) + { + if (Char.IsLetter(c) == true && + Char.IsLower(c) == !checkForLowercase) + { + throw new ArgumentException( + checkForLowercase ? + CommonResources.LowercaseStringRequired(varName) : + CommonResources.UppercaseStringRequired(varName)) + .Expected(expectedServiceArea); + } + } + } + + //******************************************************************************************** + /// + /// Throw an exception if IEnumerable is empty. + /// + /// enumerable to check + /// the variable or parameter name to display + //******************************************************************************************** + public static void CheckEnumerableForEmpty(IEnumerable enumerable, String enumerableName) + { + CheckEnumerableForEmpty(enumerable, enumerableName, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if IEnumerable is empty. + /// + /// enumerable to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. See + //******************************************************************************************** + public static void CheckEnumerableForEmpty(IEnumerable enumerable, String enumerableName, String expectedServiceArea) + { + if (enumerable != null) + { + IEnumerator enumerator = enumerable.GetEnumerator(); + if (!enumerator.MoveNext()) + { + throw new ArgumentException(CommonResources.EmptyArrayNotAllowed(), enumerableName).Expected(expectedServiceArea); + } + } + } + + //******************************************************************************************** + /// + /// Throw an exception if a string is null, empty, or consists only of white-space characters. 
+ /// + /// string to check + /// the variable or parameter name to display + //******************************************************************************************** + public static void CheckStringForNullOrWhiteSpace(String stringVar, String stringVarName) + { + CheckStringForNullOrWhiteSpace(stringVar, stringVarName, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if a string is null, empty, or consists only of white-space characters. + /// + /// string to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. See + //******************************************************************************************** + public static void CheckStringForNullOrWhiteSpace(String stringVar, String stringVarName, String expectedServiceArea) + { + CheckForNull(stringVar, stringVarName, expectedServiceArea); + if (String.IsNullOrWhiteSpace(stringVar) == true) + { + throw new ArgumentException(CommonResources.EmptyOrWhiteSpaceStringNotAllowed(), stringVarName).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throw an exception if a string length is not given value. + /// + /// string to check + /// length to check + /// the variable or parameter name to display + //******************************************************************************************** + public static void CheckStringExactLength(String stringVar, int length, String stringVarName) + { + CheckStringExactLength(stringVar, length, stringVarName, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if a string length is not given value. + /// + /// string to check + /// length to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. See + //******************************************************************************************** + public static void CheckStringExactLength(String stringVar, int length, String stringVarName, String expectedServiceArea) + { + CheckForNull(stringVar, stringVarName, expectedServiceArea); + + if (stringVar.Length != length) + { + throw new ArgumentException(CommonResources.StringLengthNotMatch(length), stringVarName).Expected(expectedServiceArea); + } + } + + //******************************************************************************************** + /// + /// Throw an exception if one of the strings is not null or empty. + /// + /// the first object to check + /// the variable or parameter name to display for the first object + /// the second object to check + /// the variable or parameter name to display for the second object + //******************************************************************************************** + public static void CheckForBothStringsNullOrEmpty(String var1, String varName1, String var2, String varName2) + { + CheckForBothStringsNullOrEmpty(var1, varName1, var2, varName2, null); + } + + //******************************************************************************************** + /// + /// Throw an exception if one of the strings is not null or empty. 
+ /// + /// the first object to check + /// the variable or parameter name to display for the first object + /// the second object to check + /// the variable or parameter name to display for the second object + /// the Service Area where this exception is expected due to user input. See + //******************************************************************************************** + public static void CheckForBothStringsNullOrEmpty(String var1, String varName1, String var2, String varName2, String expectedServiceArea) + { + if (String.IsNullOrEmpty(var1) && String.IsNullOrEmpty(var2)) + { + throw new ArgumentException(CommonResources.BothStringsCannotBeNull(varName1, varName2)).Expected(expectedServiceArea); + } + } + + /// + /// Checks if a string contains any whitespace characters. Throws an exception if it does. + /// + /// + /// + public static void CheckStringForAnyWhiteSpace(string stringVar, string stringVarName) + { + CheckStringForAnyWhiteSpace(stringVar, stringVarName, null); + } + + /// + /// Checks if a string contains any whitespace characters. Throws an exception if it does. + /// + /// + /// + /// the Service Area where this exception is expected due to user input. See + public static void CheckStringForAnyWhiteSpace(string stringVar, string stringVarName, String expectedServiceArea) + { + if (stringVar != null) + { + for (Int32 i = 0; i < stringVar.Length; i++) + { + if (Char.IsWhiteSpace(stringVar[i])) + { + throw new ArgumentException(CommonResources.WhiteSpaceNotAllowed(), stringVarName).Expected(expectedServiceArea); + } + } + } + } + + /// + /// Performs a type check on the variable, and throws if there is a mismatch + /// + /// + /// + /// + /// + public static void CheckType(object var, string varName, string typeName) + { + CheckType(var, varName, typeName, null); + } + + /// + /// Performs a type check on the variable, and throws if there is a mismatch + /// + /// + /// + /// + /// + /// the Service Area where this exception is expected due to user input. See + public static void CheckType(object var, string varName, string typeName, String expectedServiceArea) + { + if (!(var is T)) + { + throw new ArgumentException(CommonResources.UnexpectedType(varName, typeName)).Expected(expectedServiceArea); + } + } + + /// + /// Checks if an enum value is defined on the enum type + /// + /// The type of the enum + /// The enum value + /// The name of the enum argument + public static void CheckForDefinedEnum(TEnum value, string enumVarName) + where TEnum : struct + { + CheckForDefinedEnum(value, enumVarName, null); + } + + /// + /// Checks if an enum value is defined on the enum type + /// + /// The type of the enum + /// The enum value + /// The name of the enum argument + /// the Service Area where this exception is expected due to user input. See + public static void CheckForDefinedEnum(TEnum value, string enumVarName, String expectedServiceArea) + where TEnum : struct + { + // IsEnumDefined throws ArgumentException if TEnum is not an enum type + if (!typeof(TEnum).IsEnumDefined(value)) + { + throw new global::System.ComponentModel.InvalidEnumArgumentException(enumVarName, (int)(object)value, typeof(TEnum)).Expected(expectedServiceArea); + } + } + + /// + /// Determines if a string value is a valid email address. Does NOT throw. 
+ /// + /// + /// + public static Boolean IsValidEmailAddress(string emailAddress) + { + // WARNING: If you switch this to code to use the MailAddress class for validation, + // you need to evaluate all callers to see if they handle inputs like these: + // "John Smith " + // "" + // + // The MailAddress constructor supports those strings. + + return s_emailPattern.IsMatch(emailAddress); + } + + /// + /// Checks if a string is a valid email address. Throws an exception otherwise. + /// + /// + /// + public static void CheckEmailAddress(string stringVar, string stringVarName) + { + CheckEmailAddress(stringVar, stringVarName, null); + } + + /// + /// Checks if a string is a valid email address. Throws an exception otherwise. + /// + /// + /// + /// the Service Area where this exception is expected due to user input. See + public static void CheckEmailAddress(string stringVar, string stringVarName, String expectedServiceArea) + { + if (!IsValidEmailAddress(stringVar)) + { + throw new ArgumentException(CommonResources.InvalidEmailAddressError(), stringVarName).Expected(expectedServiceArea); + } + } + + /// + /// Checks if a string value is a valid URI in accordance with RFC 3986 and RFC 3987. Throws an exception otherwise. + /// + /// + /// + /// + public static void CheckIsValidURI(string uriString, UriKind uriKind, string stringVarName) + { + if (!Uri.IsWellFormedUriString(uriString, uriKind)) + { + throw new ArgumentException(CommonResources.InvalidUriError(uriKind), stringVarName); + } + } + + /// + /// + /// + /// + /// + public static void CheckStringForInvalidCharacters(string[] stringArrayVar, string stringArrayVarName) + { + CheckStringForInvalidCharacters(stringArrayVar, stringArrayVarName, null); + } + + /// + /// + /// + /// + /// + /// the Service Area where this exception is expected due to user input. See + public static void CheckStringForInvalidCharacters(string[] stringArrayVar, string stringArrayVarName, String expectedServiceArea) + { + CheckStringForInvalidCharacters(stringArrayVar, stringArrayVarName, false, expectedServiceArea); + } + + /// + /// Checks for invalid unicode characters + /// + /// + /// + /// + public static void CheckStringForInvalidCharacters(string[] stringArrayVar, string stringArrayVarName, Boolean allowCrLf) + { + CheckStringForInvalidCharacters(stringArrayVar, stringArrayVarName, allowCrLf, null); + } + + /// + /// Checks for invalid unicode characters + /// + /// + /// + /// + /// the Service Area where this exception is expected due to user input. See + public static void CheckStringForInvalidCharacters(string[] stringArrayVar, string stringArrayVarName, Boolean allowCrLf, String expectedServiceArea) + { + Debug.Assert(null != stringArrayVar, "null != stringArrayVar"); + Debug.Assert(stringArrayVar.Length > 0, "stringArrayVar.Length > 0"); + Debug.Assert(!String.IsNullOrEmpty(stringArrayVarName), "!String.IsNullOrEmpty(stringArrayVarName)"); + + for (int i = 0; i < stringArrayVar.Length; i++) + { + CheckStringForInvalidCharacters(stringArrayVar[i], String.Format(CultureInfo.InvariantCulture, "{0}[{1}]", stringArrayVarName, i), allowCrLf, expectedServiceArea); + } + } + + /// + /// Throws an exception if the provided value equals to infinity. + /// + /// value to check + /// the variable or parameter name to display + public static void CheckValueEqualsToInfinity(float value, string valueName) + { + CheckValueEqualsToInfinity(value, valueName, null); + } + + /// + /// Throws an exception if the provided value equals to infinity. 
+ /// + /// value to check + /// the variable or parameter name to display + /// the Service Area where this exception is expected due to user input. See + public static void CheckValueEqualsToInfinity(float value, string valueName, String expectedServiceArea) + { + if (float.IsInfinity(value)) + { + throw new ArgumentException(CommonResources.ValueEqualsToInfinity(), valueName).Expected(expectedServiceArea); + } + } + + private static readonly Regex s_emailPattern = new Regex(@"^([a-z0-9.!#$%&'*+/=?^_`{|}~-]+)@((\[[0-9]{1,3}" + + @"\.[0-9]{1,3}\.[0-9]{1,3}\.)|(([a-zA-Z0-9\-]+\" + + @".)+))([a-z]{2,63}|[0-9]{1,3})(\]?)$", RegexOptions.IgnoreCase); + + public static bool IsInvalidString(string strIn) + { + return IsInvalidString(strIn, false); + } + + public static bool IsInvalidString(string strIn, Boolean allowCrLf) + { + ArgumentUtility.CheckForNull(strIn, "strIn"); + + foreach (char c in strIn) + { + if (ArgumentUtility.IsIllegalInputCharacter(c, allowCrLf)) + { + return true; + } + } + + if (HasMismatchedSurrogates(strIn) == true) + { + return true; + } + + return false; + } + + public static bool HasSurrogates(string strIn) + { + for (int i = 0; i < strIn.Length; i++) + { + Char c = strIn[i]; + + if (char.IsSurrogate(c) == true) + { + return true; + } + } + return false; + } + + public static bool HasMismatchedSurrogates(string strIn) + { + for (int i = 0; i < strIn.Length; i++) + { + Char c = strIn[i]; + + // If this is a low surrogate, that means that there wasn't a preceeding high + // surrogate, and it is invalid + if (Char.IsLowSurrogate(c)) + { + return true; + } + + // is this the start of a surrogate pair? + if (Char.IsHighSurrogate(c)) + { + if (!Char.IsSurrogatePair(strIn, i)) + { + return true; + } + + // skip the low surogate + i++; + } + } + return false; + } + } +} diff --git a/src/Sdk/Common/Common/Utility/ArrayUtility.cs b/src/Sdk/Common/Common/Utility/ArrayUtility.cs new file mode 100644 index 00000000000..8c3e8b79030 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/ArrayUtility.cs @@ -0,0 +1,148 @@ +//************************************************************************************************* +// ArrayUtil.cs +// +// A class with random array processing helper routines. +// +// Copyright (c) Microsoft Corporation. All rights reserved. +//************************************************************************************************* +using System; +using System.Diagnostics; +using System.Text; + +namespace GitHub.Services.Common +{ + //******************************************************************************************** + /// + /// A class with random array processing helper routines. + /// + //******************************************************************************************** + public static class ArrayUtility + { + //**************************************************************************************** + /// + /// Compare two byte arrays to determine if they contain the same data. + /// + /// First array to compare. + /// Second array to compare. + /// true if the arrays are equal and false if not. + //**************************************************************************************** + public unsafe static bool Equals(byte[] a1, byte[] a2) + { + Debug.Assert(a1 != null, "a1 was null"); + Debug.Assert(a2 != null, "a2 was null"); + + // Check if the lengths are the same. 
+ if (a1.Length != a2.Length) + { + return false; + } + if (a1.Length == 0) + { + return true; + } + + return Equals(a1, a2, a1.Length); + } + + //**************************************************************************************** + /// + /// Generate hash code for a byte array. + /// + /// array to generate hash code for. + /// hash generated from the array members. + //**************************************************************************************** + public static int GetHashCode(byte[] array) + { + Debug.Assert(array != null, "array was null"); + + int hash = 0; + // the C# compiler defaults to unchecked behavior, so this will + // wrap silently. Since this is a hash code and not a count, this + // is fine with us. + foreach (byte item in array) + { + hash += item; + } + + return hash; + } + + //**************************************************************************************** + /// + /// Compare two byte arrays to determine if they contain the same data. + /// + /// First array to compare. + /// Second array to compare. + /// # of bytes to compare. + /// true if the arrays are equal and false if not. + //**************************************************************************************** + public unsafe static bool Equals(byte[] a1, byte[] a2, int length) + { + // Pin the arrays so that we can use unsafe pointers to compare an int at a time. + fixed (byte* p1 = &a1[0]) + { + fixed (byte* p2 = &a2[0]) + { + // Get temps for the pointers because you can't change fixed pointers. + byte* q1 = p1, q2 = p2; + + // Compare an int at a time for as long as we can. We divide by four because an int + // is always 32 bits in C# regardless of platform. + int i; + for (i = length >> 2; i > 0; --i) + { + if (*((int*) q1) != *((int*) q2)) + { + return false; + } + q1 += sizeof(int); + q2 += sizeof(int); + } + + // Compare a byte at a time for the remaining bytes (0 - 3 of them). This also + // depends on ints being 32 bits. + for (i = length & 0x3; i > 0; --i) + { + if (*q1 != *q2) + { + return false; + } + ++q1; + ++q2; + } + } + } + return true; + } + + //**************************************************************************************** + /// + /// Convert the byte array to a lower case hex string. + /// + /// byte array to be converted. + /// hex string converted from byte array. + //**************************************************************************************** + public static String StringFromByteArray(byte[] bytes) + { + if (bytes == null || bytes.Length == 0) + { + return "null"; + } + + StringBuilder sb = new StringBuilder(bytes.Length * 2); + + for (int i = 0; i < bytes.Length; i++) + { + byte b = bytes[i]; + + char first = (char)(((b >> 4) & 0x0F) + 0x30); + char second = (char)((b & 0x0F) + 0x30); + + sb.Append(first >= 0x3A ? (char)(first + 0x27) : first); + sb.Append(second >= 0x3A ? (char)(second + 0x27) : second); + } + + return sb.ToString(); + } + } +} // namespace diff --git a/src/Sdk/Common/Common/Utility/BackoffTimerHelper.cs b/src/Sdk/Common/Common/Utility/BackoffTimerHelper.cs new file mode 100644 index 00000000000..f2c4df504f3 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/BackoffTimerHelper.cs @@ -0,0 +1,38 @@ +using System; +using System.ComponentModel; + +namespace GitHub.Services.Common +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class BackoffTimerHelper + { + public static TimeSpan GetRandomBackoff( + TimeSpan minBackoff, + TimeSpan maxBackoff, + TimeSpan? 
previousBackoff = null) + { + Random random = null; + if (previousBackoff.HasValue) + { + random = new Random((Int32)previousBackoff.Value.TotalMilliseconds); + } + else + { + random = new Random(); + } + + return TimeSpan.FromMilliseconds(random.Next((Int32)minBackoff.TotalMilliseconds, (Int32)maxBackoff.TotalMilliseconds)); + } + + public static TimeSpan GetExponentialBackoff( + Int32 attempt, + TimeSpan minBackoff, + TimeSpan maxBackoff, + TimeSpan deltaBackoff) + { + Double randomBackoff = (Double)new Random().Next((Int32)(deltaBackoff.TotalMilliseconds * 0.8), (Int32)(deltaBackoff.TotalMilliseconds * 1.2)); + Double additionalBackoff = attempt < 0 ? (Math.Pow(2.0, (Double)attempt)) * randomBackoff : (Math.Pow(2.0, (Double)attempt) - 1.0) * randomBackoff; + return TimeSpan.FromMilliseconds(Math.Min(minBackoff.TotalMilliseconds + additionalBackoff, maxBackoff.TotalMilliseconds)); + } + } +} diff --git a/src/Sdk/Common/Common/Utility/CollectionsExtensions.cs b/src/Sdk/Common/Common/Utility/CollectionsExtensions.cs new file mode 100644 index 00000000000..f7d593378b4 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/CollectionsExtensions.cs @@ -0,0 +1,38 @@ +using System.Collections.Generic; + +namespace GitHub.Services.Common +{ + public static class CollectionsExtensions + { + /// + /// Adds all of the given values to this collection. + /// Can be used with dictionaries, which implement and where T is . + /// For dictionaries, also see + /// + public static TCollection AddRange(this TCollection collection, IEnumerable values) + where TCollection : ICollection + { + foreach (var value in values) + { + collection.Add(value); + } + + return collection; + } + + /// + /// Adds all of the given values to this collection if and only if the values object is not null. + /// See for more details. + /// + public static TCollection AddRangeIfRangeNotNull(this TCollection collection, IEnumerable values) + where TCollection : ICollection + { + if (values != null) + { + collection.AddRange(values); + } + + return collection; + } + } +} diff --git a/src/Sdk/Common/Common/Utility/ConvertUtility.cs b/src/Sdk/Common/Common/Utility/ConvertUtility.cs new file mode 100644 index 00000000000..b5f394a40e6 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/ConvertUtility.cs @@ -0,0 +1,29 @@ +using System; +using System.ComponentModel; +using System.Globalization; +using System.Reflection; + +namespace GitHub.Services.Common +{ + /// + /// Utility class for wrapping Convert.ChangeType to handle nullable values. 
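// ---------------------------------------------------------------------------
// Illustrative usage sketch -- not part of this patch. Shows a retry loop built
// on BackoffTimerHelper.GetExponentialBackoff above; the attempt count and the
// delay values are arbitrary example choices.
using System;
using System.Threading.Tasks;
using GitHub.Services.Common;

internal static class RetrySample
{
    public static async Task RunWithRetryAsync(Func<Task> operation)
    {
        const int maxAttempts = 5;
        for (int attempt = 0; attempt < maxAttempts; attempt++)
        {
            try
            {
                await operation();
                return;
            }
            catch (Exception) when (attempt + 1 < maxAttempts)
            {
                // The delay grows roughly as (2^attempt - 1) * delta, capped at maxBackoff.
                TimeSpan delay = BackoffTimerHelper.GetExponentialBackoff(
                    attempt,
                    minBackoff: TimeSpan.FromSeconds(1),
                    maxBackoff: TimeSpan.FromSeconds(30),
                    deltaBackoff: TimeSpan.FromSeconds(2));
                await Task.Delay(delay);
            }
        }
    }
}
// ---------------------------------------------------------------------------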
+ /// + public class ConvertUtility + { + public static object ChangeType(object value, Type type) + { + return ChangeType(value, type, CultureInfo.CurrentCulture); + } + + public static object ChangeType(object value, Type type, IFormatProvider provider) + { + if (type.IsOfType(typeof(Nullable<>))) + { + var nullableConverter = new NullableConverter(type); + return nullableConverter.ConvertTo(value, nullableConverter.UnderlyingType); + } + + return Convert.ChangeType(value, type, provider); + } + } +} diff --git a/src/Sdk/Common/Common/Utility/DictionaryExtensions.cs b/src/Sdk/Common/Common/Utility/DictionaryExtensions.cs new file mode 100644 index 00000000000..1dd010c6a22 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/DictionaryExtensions.cs @@ -0,0 +1,655 @@ +using GitHub.Services.Common.Internal; +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Reflection; +using System.Linq; + +namespace GitHub.Services.Common +{ + public static class DictionaryExtensions + { + /// + /// Adds a new value to the dictionary or updates the value if the entry already exists. + /// Returns the updated value inserted into the dictionary. + /// + public static V AddOrUpdate(this IDictionary dictionary, + K key, V addValue, Func updateValueFactory) + { + if (dictionary.TryGetValue(key, out V returnValue)) + { + addValue = updateValueFactory(returnValue, addValue); + } + + dictionary[key] = addValue; + return addValue; + } + + /// + /// Returns the value in an IDictionary at the given key, or the default + /// value for that type if it is not present. + /// + public static V GetValueOrDefault(this IDictionary dictionary, K key, V @default = default(V)) + { + V value; + return dictionary.TryGetValue(key, out value) ? value : @default; + } + + /// + /// Returns the value in an IReadOnlyDictionary at the given key, or the default + /// value for that type if it is not present. + /// + public static V GetValueOrDefault(this IReadOnlyDictionary dictionary, K key, V @default = default(V)) + { + V value; + return dictionary.TryGetValue(key, out value) ? value : @default; + } + + /// + /// Returns the value in a Dictionary at the given key, or the default + /// value for that type if it is not present. + /// + /// + /// This overload is necessary to prevent Ambiguous Match issues, as Dictionary implements both + /// IDictionary and IReadonlyDictionary, but neither interface implements the other + /// + public static V GetValueOrDefault(this Dictionary dictionary, K key, V @default = default(V)) + { + V value; + return dictionary.TryGetValue(key, out value) ? value : @default; + } + + /// + /// Returns the value in an IDictionary at the given key, or the default + /// nullable value for that type if it is not present. + /// + public static V? GetNullableValueOrDefault(this IDictionary dictionary, K key, V? @default = default(V?)) where V : struct + { + V value; + return dictionary.TryGetValue(key, out value) ? value : @default; + } + + /// + /// Returns the value in an IReadOnlyDictionary at the given key, or the default + /// nullable value for that type if it is not present. + /// + public static V? GetNullableValueOrDefault(this IReadOnlyDictionary dictionary, K key, V? @default = default(V?)) where V : struct + { + V value; + return dictionary.TryGetValue(key, out value) ? value : @default; + } + + /// + /// Returns the value in a Dictionary at the given key, or the default + /// nullable value for that type if it is not present. 
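// ---------------------------------------------------------------------------
// Illustrative usage sketch -- not part of this patch. Shows AddOrUpdate and
// GetValueOrDefault above used as a simple counter; the keys and values are
// hypothetical examples.
using System.Collections.Generic;
using GitHub.Services.Common;

internal static class CounterSample
{
    public static void Run()
    {
        IDictionary<string, int> counts = new Dictionary<string, int>();

        // Inserts 1 on the first call, then applies the update delegate.
        counts.AddOrUpdate("warning", 1, (existing, add) => existing + add);
        counts.AddOrUpdate("warning", 1, (existing, add) => existing + add);

        int warnings = counts.GetValueOrDefault("warning");    // 2
        int errors = counts.GetValueOrDefault("error", 0);     // 0, key not present
    }
}
// ---------------------------------------------------------------------------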
+ /// + /// + /// This overload is necessary to prevent Ambiguous Match issues, as Dictionary implements both + /// IDictionary and IReadonlyDictionary, but neither interface implements the other + /// + public static V? GetNullableValueOrDefault(this Dictionary dictionary, K key, V? @default = default(V?)) where V : struct + { + V value; + return dictionary.TryGetValue(key, out value) ? value : @default; + } + + /// + /// Returns the value in an IReadonlyDictionary with values of type + /// casted as values of requested type, or the defualt if the key is not found or + /// if the value was found but not compatabile with the requested type. + /// + /// The key type + /// The requested type of the stored value + /// the dictionary to perform the lookup on + /// The key to lookup + /// Optional: the default value to return if not found + /// The value at the key, or the default if it is not found or of the wrong type + public static V GetCastedValueOrDefault(this IReadOnlyDictionary dictionary, K key, V @default = default(V)) + { + object value; + return dictionary.TryGetValue(key, out value) && value is V ? (V)value : @default; + } + + /// + /// Returns the value in an IDictionary with values of type + /// casted as values of requested type, or the defualt if the key is not found or + /// if the value was found but not compatabile with the requested type. + /// + /// The key type + /// The requested type of the stored value + /// the dictionary to perform the lookup on + /// The key to lookup + /// Optional: the default value to return if not found + /// The value at the key, or the default if it is not found or of the wrong type + public static V GetCastedValueOrDefault(this IDictionary dictionary, K key, V @default = default(V)) + { + object value; + return dictionary.TryGetValue(key, out value) && value is V ? (V)value : @default; + } + + /// + /// Returns the value in a Dictionary with values of type + /// casted as values of requested type, or the defualt if the key is not found or + /// if the value was found but not compatabile with the requested type. + /// + /// + /// This overload is necessary to prevent Ambiguous Match issues, as Dictionary implements both + /// IDictionary and IReadonlyDictionary, but neither interface implements the other + /// + /// The key type + /// The requested type of the stored value + /// the dictionary to perform the lookup on + /// The key to lookup + /// Optional: the default value to return if not found + /// The value at the key, or the default if it is not found or of the wrong type + public static V GetCastedValueOrDefault(this Dictionary dictionary, K key, V @default = default(V)) + { + return ((IReadOnlyDictionary)dictionary).GetCastedValueOrDefault(key, @default); + } + + /// + /// Returns the value in an IDictionary at the given key, or creates a new value using the default constructor, adds it at the given key, and returns the new value. + /// + public static V GetOrAddValue(this IDictionary dictionary, K key) where V : new() + { + V value = default(V); + + if (!dictionary.TryGetValue(key, out value)) + { + value = new V(); + dictionary.Add(key, value); + } + + return value; + } + + /// + /// Returns the value in an IDictionary at the given key, or creates a new value using the given delegate, adds it at the given key, and returns the new value. 
+ /// + public static V GetOrAddValue(this IDictionary dictionary, K key, Func createValueToAdd) + { + V value = default(V); + + if (!dictionary.TryGetValue(key, out value)) + { + value = createValueToAdd(); + dictionary.Add(key, value); + } + + return value; + } + + /// + /// Adds all of the given key-value pairs (such as from another dictionary, since IDictionary implements IEnumerable) to this dictionary. + /// Overwrites preexisting values of the same key. + /// To avoid overwriting values, use . + /// + /// this dictionary + public static TDictionary SetRange(this TDictionary dictionary, IEnumerable> keyValuePairs) + where TDictionary : IDictionary + { + foreach (var keyValuePair in keyValuePairs) + { + dictionary[keyValuePair.Key] = keyValuePair.Value; + } + + return dictionary; + } + + /// + /// Adds all of the given key-value pairs if and only if the key-value pairs object is not null. + /// See for more details. + /// + /// this dictionary + public static TDictionary SetRangeIfRangeNotNull(this TDictionary dictionary, IEnumerable> keyValuePairs) + where TDictionary : IDictionary + { + if (keyValuePairs != null) + { + dictionary.SetRange(keyValuePairs); + } + + return dictionary; + } + + /// + /// Adds all of the given key-value pairs to this lazily initialized dictionary if and only if the key-value pairs object is not null or empty. + /// Does not initialize the dictionary otherwise. + /// See for more details. + /// + /// this dictionary + public static Lazy SetRangeIfRangeNotNullOrEmpty(this Lazy lazyDictionary, IEnumerable> keyValuePairs) + where TDictionary : IDictionary + { + if (keyValuePairs != null && keyValuePairs.Any()) + { + lazyDictionary.Value.SetRange(keyValuePairs); + } + + return lazyDictionary; + } + + /// + /// Tries to add a key to the dictionary, if it does not already exist. + /// + /// The instance where TValue is object + /// The key to add + /// The value to add + /// true if the key was added with the specified value. If the key already exists, the method returns false without updating the value. + public static bool TryAdd(this IDictionary dictionary, TKey key, TValue value) + { + if (dictionary.ContainsKey(key)) + { + return false; + } + + dictionary.Add(key, value); + return true; + } + + /// + /// Tries to add all of the given key-values pairs to the dictionary, if they do not already exist. + /// + /// The instance where TValue is object + /// The values to try and add to the dictionary + /// true if the all of the values were added. If any of the keys exists, the method returns false without updating the value. + public static bool TryAddRange(this TDictionary dictionary, IEnumerable> keyValuePairs) where TDictionary : IDictionary + { + bool rangeAdded = true; + foreach (var keyValuePair in keyValuePairs) + { + rangeAdded &= dictionary.TryAdd(keyValuePair.Key, keyValuePair.Value); + } + + return rangeAdded; + } + + /// + /// Gets the value of associated with the specified key or default value if + /// either the key is not present or the value is not of type . + /// + /// The type of the value associated with the specified key. + /// The instance where TValue is object. + /// The key whose value to get. + /// When this method returns, the value associated with the specified key, if the key is found; otherwise, the default value for the type of the value parameter. + /// true if key was found, value is non-null, and value is of type ; otherwise false. 
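// ---------------------------------------------------------------------------
// Illustrative usage sketch -- not part of this patch. Shows GetOrAddValue and
// SetRange above; the keys and values are hypothetical examples.
using System.Collections.Generic;
using GitHub.Services.Common;

internal static class GroupingSample
{
    public static void Run()
    {
        var byLabel = new Dictionary<string, List<string>>();

        // Creates the list on first use, then reuses the stored instance.
        byLabel.GetOrAddValue("linux").Add("job-1");
        byLabel.GetOrAddValue("linux").Add("job-2");

        var overrides = new Dictionary<string, string> { ["os"] = "linux" };
        var settings = new Dictionary<string, string> { ["os"] = "windows", ["arch"] = "x64" };

        // Overwrites existing keys; use TryAddRange to keep preexisting values instead.
        settings.SetRange(overrides);   // settings["os"] is now "linux"
    }
}
// ---------------------------------------------------------------------------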
+ public static bool TryGetValue(this IDictionary dictionary, string key, out T value) + { + object valueObj; + if (dictionary.TryGetValue(key, out valueObj)) + { + //Handle Guids specially + if (typeof(T) == typeof(Guid)) + { + Guid guidVal; + if (dictionary.TryGetGuid(key, out guidVal)) + { + value = (T)(object)guidVal; + return true; + } + } + + //Handle Enums specially + if (typeof(T).GetTypeInfo().IsEnum) + { + if (dictionary.TryGetEnum(key, out value)) + { + return true; + } + } + + if (valueObj is T) + { + value = (T)valueObj; + return true; + } + } + + value = default(T); + return false; + } + + /// + /// Gets the value of T associated with the specified key if the value can be converted to T according to . + /// + /// the type of the value associated with the specified key + /// the dictionary from which we should retrieve the value + /// the key of the value to retrieve + /// when this method returns, the value associated with the specified key, if the key is found and the value is convertible to T, + /// or default of T, if not + /// true if the value was retrieved successfully, otherwise false + public static bool TryGetValidatedValue(this IDictionary dictionary, string key, out T value, bool allowNull = true) + { + value = default(T); + //try to convert to T. T *must* be something with + //TypeCode != TypeCode.object (and not DBNull) OR + //byte[] or guid or object. + if (!PropertyValidation.IsValidConvertibleType(typeof(T))) + { + return false; + } + + //special case guid... + if (typeof(T) == typeof(Guid)) + { + Guid guidVal; + if (dictionary.TryGetGuid(key, out guidVal)) + { + value = (T)(object)guidVal; + return true; + } + } + else + { + object objValue = null; + if (dictionary.TryGetValue(key, out objValue)) + { + if (objValue == null) + { + //we found it and it is + //null, which may be okay depending on the allowNull flag + //value is already = default(T) + return allowNull; + } + + if (typeof(T).GetTypeInfo().IsAssignableFrom(objValue.GetType().GetTypeInfo())) + { + value = (T)objValue; + return true; + } + + if (typeof(T).GetTypeInfo().IsEnum) + { + if (dictionary.TryGetEnum(key, out value)) + { + return true; + } + } + + if (objValue is string) + { + TypeCode typeCode = Type.GetTypeCode(typeof(T)); + + try + { + value = (T)Convert.ChangeType(objValue, typeCode, CultureInfo.CurrentCulture); + return true; + } + catch (Exception) + { + return false; + } + } + } + } + + return false; + } + + /// + /// Gets the Enum value associated with the specified key if the value can be converted to an Enum. + /// + public static bool TryGetEnum(this IDictionary dictionary, string key, out T value) + { + value = default(T); + + object objValue = null; + + if (dictionary.TryGetValue(key, out objValue)) + { + if (objValue is string) + { + try + { + value = (T)Enum.Parse(typeof(T), (string)objValue, true); + return true; + } + catch (ArgumentException) + { + // Provided string is not a member of enumeration + } + } + else + { + try + { + value = (T)objValue; + return true; + } + catch (InvalidCastException) + { + // Value cannot be cast to the enum + } + } + } + + return false; + } + + /// + /// Gets the Guid value associated with the specified key if the value can be converted to a Guid. 
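// ---------------------------------------------------------------------------
// Illustrative usage sketch -- not part of this patch. Shows the typed
// TryGetValue and TryGetValidatedValue helpers above reading a loosely typed
// property bag; the keys and values are hypothetical examples.
using System;
using System.Collections.Generic;
using GitHub.Services.Common;

internal static class PropertyBagSample
{
    public static void Run()
    {
        IDictionary<string, object> properties = new Dictionary<string, object>
        {
            ["projectId"] = "5bdc6a57-6e8b-4f8c-9a3e-0d3f2c1b4a99",
            ["retryCount"] = "3",
        };

        // Guid-typed reads parse string values via Guid.TryParse.
        if (properties.TryGetValue("projectId", out Guid projectId))
        {
            // use projectId
        }

        // Convertible string values are coerced via Convert.ChangeType.
        if (properties.TryGetValidatedValue("retryCount", out int retryCount))
        {
            // use retryCount
        }
    }
}
// ---------------------------------------------------------------------------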
+ /// + public static bool TryGetGuid(this IDictionary dictionary, string key, out Guid value) + { + value = Guid.Empty; + + object objValue = null; + + if (dictionary.TryGetValue(key, out objValue)) + { + if (objValue is Guid) + { + value = (Guid)objValue; + return true; + } + else if (objValue is string) + { + return Guid.TryParse((string)objValue, out value); + } + } + + return false; + } + + /// + /// Copies the values from this into a destination . + /// + /// The source dictionary from which to from. + /// The destination dictionary to which to copy to. + /// Optional filtering predicate. + /// The destination dictionary. + /// + /// If is null, no changes are made. + /// + public static IDictionary Copy(this IDictionary source, IDictionary dest, Predicate filter) + { + if (dest == null) + { + return dest; + } + + foreach (var key in source.Keys) + { + if (filter == null || filter(key)) + { + dest[key] = source[key]; + } + } + + return dest; + } + + /// + /// Copies the values from this into a destination . + /// + /// The source dictionary from which to from. + /// The destination dictionary to which to copy to. + /// The destination dictionary. + /// + /// If is null, no changes are made. + /// + public static IDictionary Copy(this IDictionary source, IDictionary dest) + { + return source.Copy(dest, filter: null); + } + + /// + /// Sets the given key-value pair if and only if the value is not null. + /// + public static IDictionary SetIfNotNull( + this IDictionary dictionary, + TKey key, + TValue value) + where TValue : class + { + if (value != null) + { + dictionary[key] = value; + } + + return dictionary; + } + + /// + /// Sets the given key-value pair on this lazily initialized dictionary if and only if the value is not null. + /// Does not initialize the dictionary otherwise. + /// + public static Lazy> SetIfNotNull( + this Lazy> dictionary, + TKey key, + TValue value) + where TValue : class + { + if (value != null) + { + dictionary.Value[key] = value; + } + + return dictionary; + } + + /// + /// Adds the given key-value pair to this dictionary if the value is nonnull + /// and does not conflict with a preexisting value for the same key. + /// No-ops if the value is null. + /// No-ops if the preexisting value for the same key is equal to the given value. + /// Throws if the preexisting value for the same key is not equal to the given value. + /// + public static IDictionary SetIfNotNullAndNotConflicting( + this IDictionary dictionary, + TKey key, + TValue value, + string valuePropertyName = "value", + string dictionaryName = "dictionary") + where TValue : class + { + if (value == null) + { + return dictionary; + } + + dictionary.CheckForConflict(key, value, valuePropertyName, dictionaryName, ignoreDefaultValue: true); + + dictionary[key] = value; + + return dictionary; + } + + /// + /// Adds the given key-value pair to this dictionary if the value does not conflict with a preexisting value for the same key. + /// No-ops if the preexisting value for the same key is equal to the given value. + /// Throws if the preexisting value for the same key is not equal to the given value. 
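// ---------------------------------------------------------------------------
// Illustrative usage sketch -- not part of this patch. Shows SetIfNotNull and
// SetIfNotConflicting above; the keys and values are hypothetical examples.
using System.Collections.Generic;
using GitHub.Services.Common;

internal static class ConflictSample
{
    public static void Run()
    {
        IDictionary<string, string> inputs = new Dictionary<string, string>();

        inputs.SetIfNotNull("token", null);              // no-op: value is null
        inputs.SetIfNotNull("ref", "refs/heads/main");   // adds the entry

        // Re-setting the same value is allowed; a different value for an existing
        // key throws ArgumentException describing the conflict.
        inputs.SetIfNotConflicting("ref", "refs/heads/main");
    }
}
// ---------------------------------------------------------------------------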
+ /// + public static IDictionary SetIfNotConflicting( + this IDictionary dictionary, + TKey key, + TValue value, + string valuePropertyName = "value", + string dictionaryName = "dictionary") + { + dictionary.CheckForConflict(key, value, valuePropertyName, dictionaryName, ignoreDefaultValue: false); + + dictionary[key] = value; + + return dictionary; + } + + /// + /// Throws if this IDictionary contains a preexisting value for the same key which is not equal to the given key. + /// + public static void CheckForConflict( + this IDictionary dictionary, + TKey key, + TValue value, + string valuePropertyName = "value", + string dictionaryName = "dictionary", + bool ignoreDefaultValue = true) + { + if (Equals(value, default(TValue)) && ignoreDefaultValue) + { + return; + } + + TValue previousValue = default(TValue); + + if (!dictionary.TryGetValue(key, out previousValue)) + { + return; + } + + if (Equals(previousValue, default(TValue)) && ignoreDefaultValue) + { + return; + } + + if (Equals(value, previousValue)) + { + return; + } + + throw new ArgumentException( + String.Format(CultureInfo.CurrentCulture, + "Parameter {0} = '{1}' inconsistent with {2}['{3}'] => '{4}'", + valuePropertyName, value, dictionaryName, key, previousValue)); + } + + /// + /// Throws if this IReadOnlyDictionary contains a preexisting value for the same key which is not equal to the given key. + /// + public static void CheckForConflict( + this IReadOnlyDictionary dictionary, + TKey key, + TValue value, + string valuePropertyName = "value", + string dictionaryName = "dictionary", + bool ignoreDefaultValue = true) + { + if (Equals(value, default(TValue)) && ignoreDefaultValue) + { + return; + } + + TValue previousValue = default(TValue); + + if (!dictionary.TryGetValue(key, out previousValue)) + { + return; + } + + if (Equals(previousValue, default(TValue)) && ignoreDefaultValue) + { + return; + } + + if (Equals(value, previousValue)) + { + return; + } + + throw new ArgumentException( + String.Format(CultureInfo.CurrentCulture, + "Parameter {0} = \"{1}\" is inconsistent with {2}[\"{3}\"] => \"{4}\"", + valuePropertyName, value, dictionaryName, key, previousValue)); + } + } +} diff --git a/src/Sdk/Common/Common/Utility/EnumerableExtensions.cs b/src/Sdk/Common/Common/Utility/EnumerableExtensions.cs new file mode 100644 index 00000000000..470317c6cab --- /dev/null +++ b/src/Sdk/Common/Common/Utility/EnumerableExtensions.cs @@ -0,0 +1,422 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Collections.Specialized; +using System.Linq; + +namespace GitHub.Services.Common +{ + public static class EnumerableExtensions + { + /// + /// Returns an empty if the supplied source is null. + /// + /// The type of the elements of source. + /// A sequence of values to return when not null. + /// The source sequence, or a new empty one if source was null. + public static IEnumerable AsEmptyIfNull(this IEnumerable source) + => source ?? Enumerable.Empty(); + + /// + /// If an enumerable is null, and it has a default constructor, return an empty collection by calling the + /// default constructor. + /// + /// The type of the Enumerable + /// A sequence of values to return when not null + /// The source sequence, or a new empty one if source was null. + public static TEnumerable AsEmptyIfNull(this TEnumerable source) where TEnumerable : class, IEnumerable, new() + => source ?? new TEnumerable(); + + /// + /// Splits a source into several s + /// with a max size of batchSize. 
+ /// Note that batchSize must be one or larger. + /// + /// A sequence of values to split into smaller batches. + /// The number of elements to place in each batch. + /// The original collection, split into batches. + public static IEnumerable> Batch(this IEnumerable source, int batchSize) + { + ArgumentUtility.CheckForNull(source, nameof(source)); + ArgumentUtility.CheckBoundsInclusive(batchSize, 1, int.MaxValue, nameof(batchSize)); + + var nextBatch = new List(batchSize); + foreach (T item in source) + { + nextBatch.Add(item); + if (nextBatch.Count == batchSize) + { + yield return nextBatch; + nextBatch = new List(batchSize); + } + } + + if (nextBatch.Count > 0) + { + yield return nextBatch; + } + } + + /// + /// Splits an into two partitions, determined by the supplied predicate. Those + /// that follow the predicate are returned in the first, with the remaining elements in the second. + /// + /// The type of the elements of source. + /// The source enumerable to partition. + /// The predicate applied to filter the items into their partitions. + /// An object containing the matching and nonmatching results. + public static PartitionResults Partition(this IEnumerable source, Predicate predicate) + { + ArgumentUtility.CheckForNull(source, nameof(source)); + ArgumentUtility.CheckForNull(predicate, nameof(predicate)); + + var results = new PartitionResults(); + + foreach (var item in source) + { + if (predicate(item)) + { + results.MatchingPartition.Add(item); + } + else + { + results.NonMatchingPartition.Add(item); + } + } + + return results; + } + + /// + /// Partitions items from a source IEnumerable into N+1 lists, where the first N lists are determened + /// by the sequential check of the provided predicates, with the N+1 list containing those items + /// which matched none of the provided predicates. + /// + /// The type of the elements in source. + /// The source containing the elements to partition + /// The predicates to determine which list the results end up in + /// An item containing the matching collections and a collection containing the non-matching items. + public static MultiPartitionResults Partition(this IEnumerable source, params Predicate[] predicates) + { + ArgumentUtility.CheckForNull(source, nameof(source)); + ArgumentUtility.CheckForNull(predicates, nameof(predicates)); + + var range = Enumerable.Range(0, predicates.Length).ToList(); + + var results = new MultiPartitionResults(); + results.MatchingPartitions.AddRange(range.Select(_ => new List())); + + foreach (var item in source) + { + bool added = false; + + foreach (var predicateIndex in range.Where(predicateIndex => predicates[predicateIndex](item))) + { + results.MatchingPartitions[predicateIndex].Add(item); + added = true; + break; + } + + if (!added) + { + results.NonMatchingPartition.Add(item); + } + } + + return results; + } + + /// + /// Merges two sorted IEnumerables using the given comparison function which + /// defines a total ordering of the data. + /// + public static IEnumerable Merge( + this IEnumerable first, + IEnumerable second, + IComparer comparer) + { + return Merge(first, second, comparer == null ? (Func)null : comparer.Compare); + } + + /// + /// Merges two sorted IEnumerables using the given comparison function which + /// defines a total ordering of the data. 
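// ---------------------------------------------------------------------------
// Illustrative usage sketch -- not part of this patch. Shows Batch and the
// two-way Partition above; the input values are arbitrary examples.
using System.Collections.Generic;
using System.Linq;
using GitHub.Services.Common;

internal static class BatchPartitionSample
{
    public static void Run()
    {
        List<int> ids = Enumerable.Range(1, 10).ToList();

        // Yields lists of at most four items: [1..4], [5..8], [9..10].
        foreach (IList<int> batch in ids.Batch(4))
        {
            // process batch
        }

        // Splits the sequence into matching and non-matching items in a single pass.
        PartitionResults<int> parts = ids.Partition(i => i % 2 == 0);
        List<int> even = parts.MatchingPartition;
        List<int> odd = parts.NonMatchingPartition;
    }
}
// ---------------------------------------------------------------------------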
+ /// + public static IEnumerable Merge( + this IEnumerable first, + IEnumerable second, + Func comparer) + { + ArgumentUtility.CheckForNull(first, nameof(first)); + ArgumentUtility.CheckForNull(second, nameof(second)); + ArgumentUtility.CheckForNull(comparer, nameof(comparer)); + + using (IEnumerator e1 = first.GetEnumerator()) + using (IEnumerator e2 = second.GetEnumerator()) + { + bool e1Valid = e1.MoveNext(); + bool e2Valid = e2.MoveNext(); + + while (e1Valid && e2Valid) + { + if (comparer(e1.Current, e2.Current) <= 0) + { + yield return e1.Current; + + e1Valid = e1.MoveNext(); + } + else + { + yield return e2.Current; + + e2Valid = e2.MoveNext(); + } + } + + while (e1Valid) + { + yield return e1.Current; + + e1Valid = e1.MoveNext(); + } + + while (e2Valid) + { + yield return e2.Current; + + e2Valid = e2.MoveNext(); + } + } + } + + /// + /// Merges two sorted IEnumerables using the given comparison function which defines a total ordering of the data. Unlike Merge, this method requires that + /// both IEnumerables contain distinct elements. Likewise, the returned IEnumerable will only contain distinct elements. If the same element appears in both inputs, + /// it will appear only once in the output. + /// + /// Example: + /// first: [1, 3, 5] + /// second: [4, 5, 7] + /// result: [1, 3, 4, 5, 7] + /// + public static IEnumerable MergeDistinct( + this IEnumerable first, + IEnumerable second, + IComparer comparer) + { + return MergeDistinct(first, second, comparer == null ? (Func)null : comparer.Compare); + } + + /// + /// Merges two sorted IEnumerables using the given comparison function which defines a total ordering of the data. Unlike Merge, this method requires that + /// both IEnumerables contain distinct elements. Likewise, the returned IEnumerable will only contain distinct elements. If the same element appears in both inputs, + /// it will appear only once in the output. + /// + /// Example: + /// first: [1, 3, 5] + /// second: [4, 5, 7] + /// result: [1, 3, 4, 5, 7] + /// + public static IEnumerable MergeDistinct( + this IEnumerable first, + IEnumerable second, + Func comparer) + { + ArgumentUtility.CheckForNull(first, nameof(first)); + ArgumentUtility.CheckForNull(second, nameof(second)); + ArgumentUtility.CheckForNull(comparer, nameof(comparer)); + + using (IEnumerator e1 = first.GetEnumerator()) + using (IEnumerator e2 = second.GetEnumerator()) + { + bool e1Valid = e1.MoveNext(); + bool e2Valid = e2.MoveNext(); + + while (e1Valid && e2Valid) + { + if (comparer(e1.Current, e2.Current) < 0) + { + yield return e1.Current; + + e1Valid = e1.MoveNext(); + } + else if (comparer(e1.Current, e2.Current) > 0) + { + yield return e2.Current; + + e2Valid = e2.MoveNext(); + } + else + { + yield return e1.Current; + + e1Valid = e1.MoveNext(); + e2Valid = e2.MoveNext(); + } + } + + while (e1Valid) + { + yield return e1.Current; + + e1Valid = e1.MoveNext(); + } + + while (e2Valid) + { + yield return e2.Current; + + e2Valid = e2.MoveNext(); + } + } + } + + /// + /// Creates a HashSet based on the elements in . + /// + public static HashSet ToHashSet( + IEnumerable source) + { + return new HashSet(source); + } + + /// + /// Creates a HashSet with equality comparer based on the elements + /// in . + /// + public static HashSet ToHashSet( + IEnumerable source, + IEqualityComparer comparer) + { + return new HashSet(source, comparer); + } + + /// + /// Creates a HashSet based on the elements in , using transformation + /// function . 
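// ---------------------------------------------------------------------------
// Illustrative usage sketch -- not part of this patch. Shows MergeDistinct above
// merging two already sorted sequences with the default integer ordering.
using System.Collections.Generic;
using GitHub.Services.Common;

internal static class MergeSample
{
    public static void Run()
    {
        int[] first = { 1, 3, 5 };
        int[] second = { 4, 5, 7 };

        // Produces 1, 3, 4, 5, 7 -- an element present in both inputs appears once.
        IEnumerable<int> merged = first.MergeDistinct(second, Comparer<int>.Default);
    }
}
// ---------------------------------------------------------------------------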
+ /// + public static HashSet ToHashSet( + this IEnumerable source, + Func selector) + { + return new HashSet(source.Select(selector)); + } + + /// + /// Creates a HashSet with equality comparer based on the elements + /// in , using transformation function . + /// + public static HashSet ToHashSet( + this IEnumerable source, + Func selector, + IEqualityComparer comparer) + { + return new HashSet(source.Select(selector), comparer); + } + + /// + /// Executes the specified action to each of the items in the collection + /// The type of the elements in the collection. + /// The collection on which the action will be performed + /// The action to be performed + /// + public static void ForEach(this IEnumerable collection, Action action) + { + ArgumentUtility.CheckForNull(action, nameof(action)); + ArgumentUtility.CheckForNull(collection, nameof(collection)); + + foreach (T item in collection) + { + action(item); + } + } + + /// + /// Add the item to the List if the condition is satisfied + /// + /// The type of the elements in the collection. + /// The collection on which the action will be performed + /// The Condition under which the item will be added + /// The element to be added + public static void AddIf(this List list, bool condition, T element) + { + if (condition) + { + list.Add(element); + } + } + + /// + /// Converts a collection of key-value string pairs to a NameValueCollection. + /// + /// The key-value string pairs. + /// The NameValueCollection. + public static NameValueCollection ToNameValueCollection(this IEnumerable> pairs) + { + NameValueCollection collection = new NameValueCollection(); + + foreach (KeyValuePair pair in pairs) + { + collection.Add(pair.Key, pair.Value); + } + + return collection; + } + + public static IList

<P> PartitionSolveAndMergeBack<T, P>(this IList<T> source, Predicate<T> predicate, Func<IList<T>, IList<P>> matchingPartitionSolver, Func<IList<T>, IList<P>
> nonMatchingPartitionSolver) + { + ArgumentUtility.CheckForNull(source, nameof(source)); + ArgumentUtility.CheckForNull(predicate, nameof(predicate)); + ArgumentUtility.CheckForNull(matchingPartitionSolver, nameof(matchingPartitionSolver)); + ArgumentUtility.CheckForNull(nonMatchingPartitionSolver, nameof(nonMatchingPartitionSolver)); + + var partitionedSource = new PartitionResults>(); + + for (int sourceCnt = 0; sourceCnt < source.Count; sourceCnt++) + { + var item = source[sourceCnt]; + + if (predicate(item)) + { + partitionedSource.MatchingPartition.Add(new Tuple(sourceCnt, item)); + } + else + { + partitionedSource.NonMatchingPartition.Add(new Tuple(sourceCnt, item)); + } + } + + var solvedResult = new List
<P>
(source.Count); + if (partitionedSource.MatchingPartition.Any()) + { + solvedResult.AddRange(matchingPartitionSolver(partitionedSource.MatchingPartition.Select(x => x.Item2).ToList())); + } + + if (partitionedSource.NonMatchingPartition.Any()) + { + solvedResult.AddRange(nonMatchingPartitionSolver(partitionedSource.NonMatchingPartition.Select(x => x.Item2).ToList())); + } + + var result = Enumerable.Repeat(default(P), source.Count).ToList(); + + if (solvedResult.Count != source.Count) + { + return solvedResult; // either we can throw here or just return solvedResult and ignore! + } + + for (int resultCnt = 0; resultCnt < source.Count; resultCnt++) + { + if (resultCnt < partitionedSource.MatchingPartition.Count) + { + result[partitionedSource.MatchingPartition[resultCnt].Item1] = solvedResult[resultCnt]; + } + else + { + result[partitionedSource.NonMatchingPartition[resultCnt - partitionedSource.MatchingPartition.Count].Item1] = solvedResult[resultCnt]; + } + } + + return result; + } + } +} diff --git a/src/Sdk/Common/Common/Utility/ExpectedExceptionExtensions.cs b/src/Sdk/Common/Common/Utility/ExpectedExceptionExtensions.cs new file mode 100644 index 00000000000..c9dab13f706 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/ExpectedExceptionExtensions.cs @@ -0,0 +1,62 @@ +using System; + +namespace GitHub.Services.Common +{ + public static class ExpectedExceptionExtensions + { + private const string c_expectedKey = "isExpected"; + + ///

+ /// Mark the exception as expected when caused by user input in the provided area. + /// If the exception thrower is the same area as the caller, the exception will be treated as expected. + /// However, in the case of a service to service call, then the exception will be treated as unexpected. + /// ex: GitRefsController throws ArgumentException called directly by a user then the exception will be expected + /// GitRefsController throws ArgumentException called by BuildDefinitionController then the exception will not be expected. + /// + /// + /// This allows for the use case "throw new ArgumentException().Expected(c_area)" + /// This will overwrite the expected area if called a second time. + /// This should not throw any exceptions as to avoid hiding the exception that was already caught. + /// See https://vsowiki.com/index.php?title=Whitelisting_Expected_Commands_and_Exceptions + /// + /// The area name where the exception is expected. This will be compared against IVssRequestContext.ServiceName. Area should be non-empty + /// after setting the area + public static Exception Expected(this Exception ex, string area) + { + if (!string.IsNullOrEmpty(area)) + { + ex.Data[c_expectedKey] = area; + } + + return ex; + } + + /// + /// Use this to "expect" an exception within the exception filtering syntax. + /// ex: + /// catch(ArgumentException ex) when (ex.ExpectedExceptionFilter(c_area)) + /// See + /// + /// false always + public static bool ExpectedExceptionFilter(this Exception ex, string area) + { + ex.Expected(area); + return false; + } + + /// + /// Determine if the exception is expected in the specified area. + /// Case is ignored for the area comparison. + /// + public static bool IsExpected(this Exception ex, string area) + { + if (string.IsNullOrEmpty(area)) + { + return false; + } + + // An exception's Data property is an IDictionary, which returns null for keys that do not exist. 
+ return area.Equals(ex.Data[c_expectedKey] as string, StringComparison.OrdinalIgnoreCase); + } + } +} diff --git a/src/Sdk/Common/Common/Utility/HttpHeaders.cs b/src/Sdk/Common/Common/Utility/HttpHeaders.cs new file mode 100644 index 00000000000..e91d0f28a2b --- /dev/null +++ b/src/Sdk/Common/Common/Utility/HttpHeaders.cs @@ -0,0 +1,91 @@ +using System; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; + +namespace GitHub.Services.Common.Internal +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class HttpHeaders + { + public const String ActivityId = "ActivityId"; + public const String ETag = "ETag"; + public const String TfsVersion = "X-TFS-Version"; + public const String TfsRedirect = "X-TFS-Redirect"; + public const String TfsException = "X-TFS-Exception"; + public const String TfsServiceError = "X-TFS-ServiceError"; + public const String TfsSessionHeader = "X-TFS-Session"; + public const String TfsSoapException = "X-TFS-SoapException"; + public const String TfsFedAuthRealm = "X-TFS-FedAuthRealm"; + public const String TfsFedAuthIssuer = "X-TFS-FedAuthIssuer"; + public const String TfsFedAuthRedirect = "X-TFS-FedAuthRedirect"; + public const String VssAuthorizationEndpoint = "X-VSS-AuthorizationEndpoint"; + public const String VssPageHandlers = "X-VSS-PageHandlers"; + public const String VssE2EID = "X-VSS-E2EID"; + public const String VssOrchestrationId = "X-VSS-OrchestrationId"; + public const String AuditCorrelationId = "X-VSS-Audit-CorrelationId"; + public const String VssOriginUserAgent = "X-VSS-OriginUserAgent"; + + // Internal Headers that we use in our client. + public const string TfsInstanceHeader = "X-TFS-Instance"; + public const string TfsVersionOneHeader = "X-VersionControl-Instance"; + + [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Tfs")] + public const string TfsImpersonate = "X-TFS-Impersonate"; + public const string TfsSubjectDescriptorImpersonate = "X-TFS-SubjectDescriptorImpersonate"; + + public const string MsContinuationToken = "X-MS-ContinuationToken"; + public const String VssUserData = "X-VSS-UserData"; + public const String VssAgentHeader = "X-VSS-Agent"; + public const String VssAuthenticateError = "X-VSS-AuthenticateError"; + public const string VssReauthenticationAction = "X-VSS-ReauthenticationAction"; + public const string RequestedWith = "X-Requested-With"; + + public const String VssRateLimitResource = "X-RateLimit-Resource"; + public const String VssRateLimitDelay = "X-RateLimit-Delay"; + public const String VssRateLimitLimit = "X-RateLimit-Limit"; + public const String VssRateLimitRemaining = "X-RateLimit-Remaining"; + public const String VssRateLimitReset = "X-RateLimit-Reset"; + public const String RetryAfter = "Retry-After"; + + public const String VssGlobalMessage = "X-VSS-GlobalMessage"; + + public const String VssRequestRouted = "X-VSS-RequestRouted"; + public const String VssUseRequestRouting = "X-VSS-UseRequestRouting"; + + public const string VssResourceTenant = "X-VSS-ResourceTenant"; + public const String VssOverridePrompt = "X-VSS-OverridePrompt"; + + public const String VssOAuthS2STargetService = "X-VSS-S2STargetService"; + public const String VssHostOfflineError = "X-VSS-HostOfflineError"; + + public const string VssForceMsaPassThrough = "X-VSS-ForceMsaPassThrough"; + public const string VssRequestPriority = "X-VSS-RequestPriority"; + + // This header represents set of ';' delimited mappings (usually one) that are considered by DetermineAccessMapping 
API + public const string VssClientAccessMapping = "X-VSS-ClientAccessMapping"; + + // This header is used to download artifacts anonymously. + // N.B. Some resources secured with download tickets (e.g. TFVC files) are still retrieved with the download + // ticket in the query string. + public const string VssDownloadTicket = "X-VSS-DownloadTicket"; + + public const string IfModifiedSince = "If-Modified-Since"; + public const string Authorization = "Authorization"; + public const string Location = "Location"; + public const string ProxyAuthenticate = "Proxy-Authenticate"; + public const string WwwAuthenticate = "WWW-Authenticate"; + + public const string AfdIncomingRouteKey = "X-FD-RouteKey"; + public const string AfdOutgoingRouteKey = "X-AS-RouteKey"; + public const string AfdIncomingEndpointList = "X-FD-RouteKeyApplicationEndpointList"; + public const string AfdOutgoingEndpointList = "X-AS-RouteKeyApplicationEndpointList"; + public const string AfdResponseRef = "X-MSEdge-Ref"; + public const string AfdIncomingClientIp = "X-FD-ClientIP"; + public const string AfdIncomingSocketIp = "X-FD-SocketIP"; + public const string AfdIncomingRef = "X-FD-Ref"; + public const string AfdIncomingEventId = "X-FD-EventId"; + public const string AfdIncomingEdgeEnvironment = "X-FD-EdgeEnvironment"; + public const string AfdOutgoingQualityOfResponse = "X-AS-QualityOfResponse"; + public const string AfdOutgoingClientIp = "X-MSEdge-ClientIP"; + } +} diff --git a/src/Sdk/Common/Common/Utility/LongPathUtility.cs b/src/Sdk/Common/Common/Utility/LongPathUtility.cs new file mode 100644 index 00000000000..f179bbe9b66 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/LongPathUtility.cs @@ -0,0 +1,553 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Runtime.ConstrainedExecution; +using System.Runtime.InteropServices; +using System.Text; +using System.Text.RegularExpressions; +using System.Threading.Tasks; + +namespace GitHub.Services.Common +{ + /// + /// Provides path normalization/expansion for absolute, relative and UNC-style paths + /// and supports paths that contain more than 248 characters. + /// + /// + /// This utility class can be used in place of the .NET Path and Directory classes + /// that throw System.IO.PathTooLongException when paths are longer than 248 characters + /// + public static class LongPathUtility + { + private static Regex AbsolutePathRegEx = new Regex(@"^([a-zA-Z]:\\|\\\\)", RegexOptions.CultureInvariant | RegexOptions.Compiled); + private const int ERROR_FILE_NOT_FOUND = 2; + + /// + /// Returns a list of directory names under the path specified, and optionally all subdirectories + /// + /// The directory to search + /// Specifies whether the search operation should include only the currect directory or all subdirectories + /// A list of all subdirectories + public static IEnumerable EnumerateDirectories(string path, bool recursiveSearch) + { + var directoryPaths = new List(); + EnumerateDirectoriesInternal(directoryPaths, path, recursiveSearch); + return directoryPaths; + } + + /// + /// Returns a list of file names under the path specified, and optionally within all subdirectories. 
+ /// + /// The directory to search + /// Specifies whether the search operation should include only the current directory or all subdirectories + /// + /// A list of full file names(including path) contained in the directory specified that match the specified search pattern. + public static IEnumerable EnumerateFiles(string path, bool recursiveSearch) + { + return EnumerateFiles(path, "*", recursiveSearch); + } + + /// + /// Returns an enumerable collection of file names that match a search pattern in a specified path, + /// and optionally searches subdirectories. + /// + /// The directory to search + /// The search string to match against the names of the files + /// Specifies whether the search operation should include only the current directory or all subdirectories + /// + /// A list of full file names(including path) contained in the directory specified (and subdirectories optionally) that match the specified pattern. + /// + public static IEnumerable EnumerateFiles(string path, string matchPattern, bool recursiveSearch) + { + if (!DirectoryExists(path)) + { + throw new DirectoryNotFoundException($"The path '{path}' is not a valid directory."); + } + + var filePaths = new List(); + EnumerateFilesInternal(filePaths, path, matchPattern, recursiveSearch); + return filePaths; + } + + /// + /// Returns true/false whether the file exists. This method inspects the + /// file system attributes and supports files without extensions (ex: DIRS, Sources). This method + /// supports file paths that are longer than 260 characters. + /// + /// The file path to inspect + /// + /// True if the file exists or false if not + /// + public static bool FileExists(string filePath) + { + return FileOrDirectoryExists(filePath, isDirectory: false); + } + + /// + /// Returns true/false whether the directory exists. This method inspects the + /// file system attributes and supports files without extensions (ex: DIRS, Sources). This method + /// supports file paths that are longer than 260 characters. + /// + /// The file path to inspect + /// + /// True if the directory exists or false if not + /// + public static bool DirectoryExists(string directoryPath) + { + return FileOrDirectoryExists(directoryPath, isDirectory: true); + } + + private static bool FileOrDirectoryExists(string filePath, bool isDirectory) + { + if (String.IsNullOrWhiteSpace(filePath)) + { + throw new ArgumentException("A path to the file is required and cannot be null, empty or whitespace", "filePath"); + } + + bool pathExists = false; + + // File names may or may not include an extension (ex: DIRS, Sources). We have to look at the attributes + // on the file system object in order to distinguish a directory from a file + var attributes = (FlagsAndAttributes)NativeMethods.GetFileAttributes(filePath); + + if (attributes != FlagsAndAttributes.InvalidFileAttributes) + { + bool pathIsDirectory = (attributes & FlagsAndAttributes.Directory) == FlagsAndAttributes.Directory; + + if (pathIsDirectory == isDirectory) + { + pathExists = true; + } + } + + return pathExists; + } + + /// + /// Returns the fully expanded/normalized path. This method supports paths that are + /// longer than 248 characters. 
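// ---------------------------------------------------------------------------
// Illustrative usage sketch -- not part of this patch. Shows the long-path-aware
// existence check and file enumeration above; the *.dll pattern and the sample
// class are hypothetical, and these helpers rely on Win32 APIs, so they are
// Windows-only.
using System.Collections.Generic;
using GitHub.Services.Common;

internal static class LongPathSample
{
    public static IEnumerable<string> FindAssemblies(string root)
    {
        if (!LongPathUtility.DirectoryExists(root))
        {
            return new List<string>();
        }

        // Recursively lists *.dll files even when full paths exceed 260 characters.
        return LongPathUtility.EnumerateFiles(root, "*.dll", recursiveSearch: true);
    }
}
// ---------------------------------------------------------------------------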
+ /// + /// The file or directory path + /// + public static string GetFullNormalizedPath(string path) + { + if (String.IsNullOrWhiteSpace(path)) + { + throw new ArgumentException("A path is required and cannot be null, empty or whitespace", "path"); + } + + string outPath = path; + + // We need the length of the absolute path in order to prepare a buffer of + // the correct size + uint bufferSize = NativeMethods.GetFullPathName(path, 0, null, null); + int lastWin32Error = Marshal.GetLastWin32Error(); + + if (bufferSize > 0) + { + var absolutePath = new StringBuilder((int)bufferSize); + uint length = NativeMethods.GetFullPathName(path, bufferSize, absolutePath, null); + lastWin32Error = Marshal.GetLastWin32Error(); + + if (length > 0) + { + outPath = absolutePath.ToString(); + } + else + { + // Path resolution failed + throw new Win32Exception( + lastWin32Error, + String.Format( + CultureInfo.InvariantCulture, + "Path normalization/expansion failed. The path length was not returned by the Kernel32 subsystem for '{0}'.", + path + ) + ); + } + } + else + { + // Path resolution failed and the path length could not + // be determined + throw new Win32Exception( + lastWin32Error, + String.Format( + CultureInfo.InvariantCulture, + "Path normalization/expansion failed. A full path was not returned by the Kernel32 subsystem for '{0}'.", + path + ) + ); + } + + return outPath != null ? outPath.TrimEnd('\\') : null; + } + + /// + /// Determines whether the specified path is an absolute path or not. + /// + /// The path to be tested. + /// + /// true if the path is absolute; otherwise, false. + /// + public static bool IsAbsolutePath(string path) + { + return LongPathUtility.AbsolutePathRegEx.Match(path).Success; + } + + public static string RemoveExtendedLengthPathPrefix(string inPath) + { + string outPath = inPath; + + if (!String.IsNullOrWhiteSpace(inPath)) + { + if (inPath.StartsWith("\\", StringComparison.OrdinalIgnoreCase)) + { + // ex: \\?\UNC\server\share to \\server\share + outPath = inPath.Replace(@"\\?\UNC", @"\"); + + // ex: \\?\c:\windows to c:\windows + outPath = outPath.Replace(@"\\?\", String.Empty); + } + } + + return outPath; + } + + private static string CombinePaths(string pathA, string pathB) + { + if (pathA == null) + { + throw new ArgumentNullException("pathA"); + } + + if (pathB == null) + { + throw new ArgumentNullException("pathB"); + } + + // The Path class does not suffer from the 248/260 character limitation + // that the File and Directory classes do. 
+ return Path.Combine( + pathA.TrimEnd('\\'), + pathB.TrimStart('\\') + ); + } + + private static string ConvertToExtendedLengthPath(string path) + { + string extendedLengthPath = GetFullNormalizedPath(path); + + if (!String.IsNullOrWhiteSpace(extendedLengthPath)) + { + //no need to modify- it's already unicode + if (!extendedLengthPath.StartsWith(@"\\?", StringComparison.OrdinalIgnoreCase)) + { + // ex: \\server\share + if (extendedLengthPath.StartsWith(@"\\", StringComparison.OrdinalIgnoreCase)) + { + // make it \\?\UNC\server\share + extendedLengthPath = String.Format( + CultureInfo.InvariantCulture, + @"\\?\UNC{0}", + extendedLengthPath.Substring(1) + ); + } + else //not unicode already, and not UNC + { + extendedLengthPath = String.Format( + CultureInfo.InvariantCulture, + @"\\?\{0}", + extendedLengthPath + ); + } + } + } + + return extendedLengthPath; + } + + private static IEnumerable EnumerateDirectoriesInPath(string path) + { + SafeFindHandle handle = null; + var findData = new FindData(); + var childDirectories = new List(); + + using (handle = NativeMethods.FindFirstFile(CombinePaths(ConvertToExtendedLengthPath(path), "*"), findData)) + { + if (!handle.IsInvalid) + { + bool searchComplete = false; + + do + { + // skip the dot directories + if (!findData.fileName.Equals(@".") && !findData.fileName.Equals(@"..")) + { + if ((findData.fileAttributes & (int)FileAttributes.Directory) != 0) + { + childDirectories.Add(RemoveExtendedLengthPathPrefix(CombinePaths(path, findData.fileName))); + } + } + + if (NativeMethods.FindNextFile(handle, findData)) + { + if (handle.IsInvalid) + { + throw new Win32Exception( + Marshal.GetLastWin32Error(), + String.Format( + CultureInfo.InvariantCulture, + "Enumerating subdirectories for path '{0}' failed.", + path + ) + ); + } + } + else + { + searchComplete = true; + } + + } while (!searchComplete); + } + } + + return childDirectories; + } + + private static IEnumerable EnumerateFilesInPath(string path, string matchPattern) + { + SafeFindHandle handle = null; + var findData = new FindData(); + var fullFilePaths = new List(); + + using (handle = NativeMethods.FindFirstFile(CombinePaths(ConvertToExtendedLengthPath(path), matchPattern), findData)) + { + int lastWin32Error = Marshal.GetLastWin32Error(); + + if (handle.IsInvalid) + { + if (lastWin32Error != ERROR_FILE_NOT_FOUND) + { + throw new Win32Exception( + lastWin32Error, + String.Format(CultureInfo.InvariantCulture, "Enumerating files for path '{0}' failed.", path) + ); + } + } + else + { + bool searchComplete = false; + + do + { + // skip the dot directories + if (!findData.fileName.Equals(@".") && !findData.fileName.Equals(@"..")) + { + if ((findData.fileAttributes & (int)FileAttributes.Directory) == 0) + { + fullFilePaths.Add(RemoveExtendedLengthPathPrefix(CombinePaths(path, findData.fileName))); + } + } + + if (NativeMethods.FindNextFile(handle, findData)) + { + lastWin32Error = Marshal.GetLastWin32Error(); + + if (handle.IsInvalid) + { + throw new Win32Exception( + lastWin32Error, + String.Format( + CultureInfo.InvariantCulture, + "Enumerating subdirectories for path '{0}' failed.", + path + ) + ); + } + } + else + { + searchComplete = true; + } + + } while (!searchComplete); + } + } + + return fullFilePaths; + } + + private static void EnumerateFilesInternal(List filePaths, string path, string matchPattern, bool recursiveSearch) + { + var fullFilePaths = EnumerateFilesInPath(path, matchPattern); + if (fullFilePaths.Any()) + { + lock (filePaths) + { + filePaths.AddRange(fullFilePaths); + } + } + 
+ if (recursiveSearch) + { + var directorySearchPaths = EnumerateDirectoriesInPath(path); + if (directorySearchPaths.Any()) + { + Parallel.ForEach( + directorySearchPaths, + (searchPath) => + { + EnumerateFilesInternal(filePaths, searchPath, matchPattern, recursiveSearch); + } + ); + } + } + } + + public static void EnumerateDirectoriesInternal(List directoryPaths, string path, bool recursiveSearch) + { + var directorySearchPaths = EnumerateDirectoriesInPath(path); + if (directorySearchPaths.Any()) + { + lock (directoryPaths) + { + directoryPaths.AddRange(directorySearchPaths); + } + + if (recursiveSearch) + { + // This will not ensure that the directory paths are added to the list + // in alphabetical order but does provide performance 2 - 4 times better than the + // canonical Directory.GetDirectories() method. + Parallel.ForEach( + directorySearchPaths, + (searchPath) => + { + EnumerateDirectoriesInternal(directoryPaths, searchPath, recursiveSearch); + } + ); + } + } + } + + /// + /// Kernel32.dll native interop methods for use with utility file/path parsing + /// operations + /// + private static class NativeMethods + { + private const string Kernel32Dll = "kernel32.dll"; + + [DllImport(Kernel32Dll, CharSet = CharSet.Unicode, BestFitMapping = false, ThrowOnUnmappableChar = true)] + [return: MarshalAs(UnmanagedType.Bool)] + public static extern bool FindClose(IntPtr hFindFile); + + [SuppressMessage("Microsoft.Globalization", "CA2101:SpecifyMarshalingForPInvokeStringArguments", MessageId = "FindData.alternateFileName")] + [SuppressMessage("Microsoft.Globalization", "CA2101:SpecifyMarshalingForPInvokeStringArguments", MessageId = "FindData.fileName")] + [DllImport(Kernel32Dll, CharSet = CharSet.Unicode, BestFitMapping = false, ThrowOnUnmappableChar = true, SetLastError = true)] + public static extern SafeFindHandle FindFirstFile( + [MarshalAs(UnmanagedType.LPTStr)] + string fileName, + [In, Out] FindData findFileData + ); + [SuppressMessage("Microsoft.Globalization", "CA2101:SpecifyMarshalingForPInvokeStringArguments", MessageId = "FindData.alternateFileName")] + [SuppressMessage("Microsoft.Globalization", "CA2101:SpecifyMarshalingForPInvokeStringArguments", MessageId = "FindData.fileName")] + [DllImport(Kernel32Dll, CharSet = CharSet.Unicode, BestFitMapping = false, ThrowOnUnmappableChar = true, SetLastError = true)] + [return: MarshalAs(UnmanagedType.Bool)] + public static extern bool FindNextFile(SafeFindHandle hFindFile, [In, Out] FindData lpFindFileData); + + [DllImport(Kernel32Dll, CharSet = CharSet.Unicode, BestFitMapping = false, ThrowOnUnmappableChar = true, SetLastError = true)] + public static extern int GetFileAttributes(string lpFileName); + + [DllImport(Kernel32Dll, CharSet = CharSet.Unicode, BestFitMapping = false, ThrowOnUnmappableChar = true, SetLastError = true)] + public static extern uint GetFullPathName( + [MarshalAs(UnmanagedType.LPTStr)] + string lpFileName, + uint nBufferLength, + [Out] + StringBuilder lpBuffer, + StringBuilder lpFilePart + ); + } + + //for mapping to the WIN32_FIND_DATA native structure + [SuppressMessage("StyleCop.CSharp.NamingRules", "SA1307:AccessibleFieldsMustBeginWithUpperCaseLetter", Justification = "Reviewed.")] + [SuppressMessage("StyleCop.CSharp.MaintainabilityRules", "SA1401:FieldsMustBePrivate", Justification = "Reviewed.")] + [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)] + private sealed class FindData + { + // NOTE: + // Although it may seem correct to Marshal the string members of this class as 
UnmanagedType.LPWStr, they + // must explicitly remain UnmanagedType.ByValTStr with the size constraints noted. Otherwise we end up with + // COM Interop exceptions while trying to marshal the data across the PInvoke boundaries. We thus require the StyleCop + // suppressions on the NativeMethods.FindNextFile() method above. + public int fileAttributes; + public System.Runtime.InteropServices.ComTypes.FILETIME creationTime; + public System.Runtime.InteropServices.ComTypes.FILETIME lastAccessTime; + public System.Runtime.InteropServices.ComTypes.FILETIME lastWriteTime; + public int nFileSizeHigh; + public int nFileSizeLow; + public int dwReserved0; + public int dwReserved1; + [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 260)] + public string fileName; + [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 14)] + public string alternateFileName; + } + + //A Win32 safe find handle in which a return value of -1 indicates it's invalid + private sealed class SafeFindHandle : Microsoft.Win32.SafeHandles.SafeHandleMinusOneIsInvalid + { + public SafeFindHandle() + : base(true) + { + return; + } + + [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)] + protected override bool ReleaseHandle() + { + return NativeMethods.FindClose(handle); + } + } + + [Flags] + private enum FlagsAndAttributes : uint + { + None = 0x00000000, + Readonly = 0x00000001, + Hidden = 0x00000002, + System = 0x00000004, + Directory = 0x00000010, + Archive = 0x00000020, + Device = 0x00000040, + Normal = 0x00000080, + Temporary = 0x00000100, + SparseFile = 0x00000200, + ReparsePoint = 0x00000400, + Compressed = 0x00000800, + Offline = 0x00001000, + NotContentIndexed = 0x00002000, + Encrypted = 0x00004000, + Write_Through = 0x80000000, + Overlapped = 0x40000000, + NoBuffering = 0x20000000, + RandomAccess = 0x10000000, + SequentialScan = 0x08000000, + DeleteOnClose = 0x04000000, + BackupSemantics = 0x02000000, + PosixSemantics = 0x01000000, + OpenReparsePoint = 0x00200000, + OpenNoRecall = 0x00100000, + FirstPipeInstance = 0x00080000, + + InvalidFileAttributes = 0xFFFFFFFF // Returned by GetFileAttributes on Non existant path + } + } +} diff --git a/src/Sdk/Common/Common/Utility/PartitioningResults.cs b/src/Sdk/Common/Common/Utility/PartitioningResults.cs new file mode 100644 index 00000000000..4b0b82e73e7 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/PartitioningResults.cs @@ -0,0 +1,26 @@ +using System.Collections.Generic; + +namespace GitHub.Services.Common +{ + /// + /// Contains results from two-way variant of EnuemrableExtensions.Partition() + /// + /// The type of the elements in the contained lists. + public sealed class PartitionResults + { + public List MatchingPartition { get; } = new List(); + + public List NonMatchingPartition { get; } = new List(); + } + + /// + /// Contains results from multi-partitioning variant of EnuemrableExtensions.Partition() + /// + /// The type of the elements in the contained lists. 
+ public sealed class MultiPartitionResults + { + public List> MatchingPartitions { get; } = new List>(); + + public List NonMatchingPartition { get; } = new List(); + } +} diff --git a/src/Sdk/Common/Common/Utility/PathUtility.cs b/src/Sdk/Common/Common/Utility/PathUtility.cs new file mode 100644 index 00000000000..5b902ea01ae --- /dev/null +++ b/src/Sdk/Common/Common/Utility/PathUtility.cs @@ -0,0 +1,40 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace GitHub.Services.Common +{ + [EditorBrowsable(EditorBrowsableState.Never)] + internal static class PathUtility + { + /// + /// Replacement for Path.Combine. + /// For URL please use UrlUtility.CombineUrl + /// + /// The first half of the path. + /// The second half of the path. + /// The concatenated string with and leading slashes or + /// tildes removed from the second string. + public static String Combine(String path1, String path2) + { + if (String.IsNullOrEmpty(path1)) + { + return path2; + } + + if (String.IsNullOrEmpty(path2)) + { + return path1; + } + + Char separator = path1.Contains("/") ? '/' : '\\'; + + Char[] trimChars = new Char[] { '\\', '/' }; + + return path1.TrimEnd(trimChars) + separator.ToString() + path2.TrimStart(trimChars); + } + } +} diff --git a/src/Sdk/Common/Common/Utility/PrimitiveExtensions.cs b/src/Sdk/Common/Common/Utility/PrimitiveExtensions.cs new file mode 100644 index 00000000000..6c51e9cc4ba --- /dev/null +++ b/src/Sdk/Common/Common/Utility/PrimitiveExtensions.cs @@ -0,0 +1,89 @@ +using GitHub.Services.Common.Internal; +using System; +using System.Text; + +namespace GitHub.Services.Common +{ + public static class PrimitiveExtensions + { + public static readonly DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc); + private static readonly long maxSecondsSinceUnixEpoch = (long)DateTime.MaxValue.Subtract(UnixEpoch).TotalSeconds; + + //extension methods to convert to and from a Unix Epoch time to a DateTime + public static Int64 ToUnixEpochTime(this DateTime dateTime) + { + return Convert.ToInt64((dateTime.ToUniversalTime() - UnixEpoch).TotalSeconds); + } + + public static DateTime FromUnixEpochTime(this Int64 unixTime) + { + if (unixTime >= maxSecondsSinceUnixEpoch) + { + return DateTime.MaxValue; + } + else + { + return UnixEpoch + TimeSpan.FromSeconds(unixTime); + } + } + + public static string ToBase64StringNoPaddingFromString(string utf8String) + { + return ToBase64StringNoPadding(Encoding.UTF8.GetBytes(utf8String)); + } + + public static string FromBase64StringNoPaddingToString(string base64String) + { + byte[] result = FromBase64StringNoPadding(base64String); + + if (result == null || result.Length == 0) + { + return null; + } + + return Encoding.UTF8.GetString(result, 0, result.Length); + } + + //These methods convert To and From base64 strings without padding + //for JWT scenarios + //code taken from the JWS spec here: + //http://tools.ietf.org/html/draft-ietf-jose-json-web-signature-08#appendix-C + public static String ToBase64StringNoPadding(this byte[] bytes) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(bytes, "bytes"); + + string s = Convert.ToBase64String(bytes); // Regular base64 encoder + s = s.Split('=')[0]; // Remove any trailing '='s + s = s.Replace('+', '-'); // 62nd char of encoding + s = s.Replace('/', '_'); // 63rd char of encoding + return s; + } + + public static byte[] FromBase64StringNoPadding(this String base64String) + { + 
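+            // Illustrative example: ToBase64StringNoPadding(new byte[] { 1, 2 }) yields "AQI"
+            // (standard base64 "AQI=" with the trailing padding stripped). This method reverses that:
+            // "AQI".Length % 4 == 3, so a single '=' is re-appended below before calling Convert.FromBase64String.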
ArgumentUtility.CheckStringForNullOrEmpty(base64String, "base64String"); + + string s = base64String; + s = s.Replace('-', '+'); // 62nd char of encoding + s = s.Replace('_', '/'); // 63rd char of encoding + switch (s.Length % 4) // Pad with trailing '='s + { + case 0: break; // No pad chars in this case + case 2: s += "=="; break; // Two pad chars + case 3: s += "="; break; // One pad char + default: + throw new ArgumentException(CommonResources.IllegalBase64String(), "base64String"); + } + return Convert.FromBase64String(s); // Standard base64 decoder + } + + /// + /// Converts base64 represented value into hex string representation. + /// + public static String ConvertToHex(String base64String) + { + var bytes = FromBase64StringNoPadding(base64String); + return BitConverter.ToString(bytes).Replace("-", String.Empty); + } + } +} diff --git a/src/Sdk/Common/Common/Utility/PropertyValidation.cs b/src/Sdk/Common/Common/Utility/PropertyValidation.cs new file mode 100644 index 00000000000..6e682115ab1 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/PropertyValidation.cs @@ -0,0 +1,361 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Reflection; + +namespace GitHub.Services.Common.Internal +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class PropertyValidation + { + public static void ValidateDictionary(IDictionary source) + { + ArgumentUtility.CheckForNull(source, "source"); + + foreach (var entry in source) + { + ValidatePropertyName(entry.Key); + ValidatePropertyValue(entry.Key, entry.Value); + } + } + + public static Boolean IsValidConvertibleType(Type type) + { + return type != null && (type.GetTypeInfo().IsEnum || + type == typeof(Object) || + type == typeof(Byte[]) || + type == typeof(Guid) || + type == typeof(Boolean) || + type == typeof(Char) || + type == typeof(SByte) || + type == typeof(Byte) || + type == typeof(Int16) || + type == typeof(UInt16) || + type == typeof(Int32) || + type == typeof(UInt32) || + type == typeof(Int64) || + type == typeof(UInt64) || + type == typeof(Single) || + type == typeof(Double) || + type == typeof(Decimal) || + type == typeof(DateTime) || + type == typeof(String) + ); + } + + /// + /// Used for deserialization checks. Makes sure that + /// the type string presented is in the inclusion list + /// of valid types for the property service + /// + /// + /// + public static Boolean IsValidTypeString(String type) + { + return s_validPropertyTypeStrings.ContainsKey(type); + } + + /// + /// Used for deserialization checks. Looks up the + /// type string presented in the inclusion list + /// of valid types for the property service and returns the Type object + /// + /// + /// Resulting type that maps to the type string + /// + public static Boolean TryGetValidType(String type, out Type result) + { + return s_validPropertyTypeStrings.TryGetValue(type, out result); + } + + /// + /// Make sure the property name conforms to the requirements for a + /// property name. + /// + /// + public static void ValidatePropertyName(String propertyName) + { + ValidatePropertyString(propertyName, c_maxPropertyNameLengthInChars, "propertyName"); + + // Key must not start or end in whitespace. ValidatePropertyString() checks for null and empty strings, + // which is why indexing on length without re-checking String.IsNullOrEmpty() is ok. 
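+            // Illustrative example: "Title" is accepted, while " Title" or "Title " throws
+            // VssPropertyValidationException from the check below.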
+ if (Char.IsWhiteSpace(propertyName[0]) || Char.IsWhiteSpace(propertyName[propertyName.Length - 1])) + { + throw new VssPropertyValidationException(propertyName, CommonResources.InvalidPropertyName(propertyName)); + } + } + + /// + /// Make sure the property value is within the supported range of values + /// for the type of the property specified. + /// + /// + /// + public static void ValidatePropertyValue(String propertyName, Object value) + { + // Keep this consistent with XmlPropertyWriter.Write. + if (null != value) + { + Type type = value.GetType(); + TypeCode typeCode = Type.GetTypeCode(type); + + if (type.IsEnum) + { + ValidateStringValue(propertyName, ((Enum)value).ToString("D")); + } + else if (typeCode == TypeCode.Object && value is byte[]) + { + ValidateByteArray(propertyName, (byte[])value); + } + else if (typeCode == TypeCode.Object && value is Guid) + { + //treat Guid like the other valid primitive types that + //don't have explicit columns, e.g. it gets stored as a string + ValidateStringValue(propertyName, ((Guid)value).ToString("N")); + } + else if (typeCode == TypeCode.Object) + { + throw new PropertyTypeNotSupportedException(propertyName, type); + } + else if (typeCode == TypeCode.DBNull) + { + throw new PropertyTypeNotSupportedException(propertyName, type); + } + else if (typeCode == TypeCode.Empty) + { + // should be impossible with null check above, but just in case. + throw new PropertyTypeNotSupportedException(propertyName, type); + } + else if (typeCode == TypeCode.Int32) + { + ValidateInt32(propertyName, (int)value); + } + else if (typeCode == TypeCode.Double) + { + ValidateDouble(propertyName, (double)value); + } + else if (typeCode == TypeCode.DateTime) + { + ValidateDateTime(propertyName, (DateTime)value); + } + else if (typeCode == TypeCode.String) + { + ValidateStringValue(propertyName, (String)value); + } + else + { + // Here are the remaining types. All are supported over in DbArtifactPropertyValueColumns. + // With a property definition they'll be strongly-typed when they're read back. + // Otherwise they read back as strings. + // Boolean + // Char + // SByte + // Byte + // Int16 + // UInt16 + // UInt32 + // Int64 + // UInt64 + // Single + // Decimal + ValidateStringValue(propertyName, value.ToString()); + } + } + } + + private static void ValidateStringValue(String propertyName, String propertyValue) + { + if (propertyValue.Length > c_maxStringValueLength) + { + throw new VssPropertyValidationException("value", CommonResources.InvalidPropertyValueSize(propertyName, typeof(String).FullName, c_maxStringValueLength)); + } + ArgumentUtility.CheckStringForInvalidCharacters(propertyValue, "value", true); + } + + private static void ValidateByteArray(String propertyName, Byte[] propertyValue) + { + if (propertyValue.Length > c_maxByteValueSize) + { + throw new VssPropertyValidationException("value", CommonResources.InvalidPropertyValueSize(propertyName, typeof(Byte[]).FullName, c_maxByteValueSize)); + } + } + + private static void ValidateDateTime(String propertyName, DateTime propertyValue) + { + // Let users get an out of range error for MinValue and MaxValue, not a DateTimeKind unspecified error. + if (propertyValue != DateTime.MinValue + && propertyValue != DateTime.MaxValue) + { + if (propertyValue.Kind == DateTimeKind.Unspecified) + { + throw new VssPropertyValidationException("value", CommonResources.DateTimeKindMustBeSpecified()); + } + + // Make sure the property value is in Universal time. 
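+                // Illustrative example (assumes a UTC-4 local time zone): a DateTimeKind.Local value of
+                // 2019-10-10T00:00:00 is normalized to 2019-10-10T04:00:00Z here before the range check below.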
+ if (propertyValue.Kind != DateTimeKind.Utc) + { + propertyValue = propertyValue.ToUniversalTime(); + } + } + + CheckRange(propertyValue, s_minAllowedDateTime, s_maxAllowedDateTime, propertyName, "value"); + } + + private static void ValidateDouble(String propertyName, Double propertyValue) + { + if (Double.IsInfinity(propertyValue) || Double.IsNaN(propertyValue)) + { + throw new VssPropertyValidationException("value", CommonResources.DoubleValueOutOfRange(propertyName, propertyValue)); + } + + // SQL Server support: - 1.79E+308 to -2.23E-308, 0 and 2.23E-308 to 1.79E+308 + if (propertyValue < s_minNegative || + (propertyValue < 0 && propertyValue > s_maxNegative) || + propertyValue > s_maxPositive || + (propertyValue > 0 && propertyValue < s_minPositive)) + { + throw new VssPropertyValidationException("value", CommonResources.DoubleValueOutOfRange(propertyName, propertyValue)); + } + } + + private static void ValidateInt32(String propertyName, Int32 propertyValue) + { + // All values allowed. + } + + /// + /// Validation helper for validating all property strings. + /// + /// + /// + /// + private static void ValidatePropertyString(String propertyString, Int32 maxSize, String argumentName) + { + ArgumentUtility.CheckStringForNullOrEmpty(propertyString, argumentName); + if (propertyString.Length > maxSize) + { + throw new VssPropertyValidationException(argumentName, CommonResources.PropertyArgumentExceededMaximumSizeAllowed(argumentName, maxSize)); + } + ArgumentUtility.CheckStringForInvalidCharacters(propertyString, argumentName, true); + } + + public static void CheckPropertyLength(String propertyValue, Boolean allowNull, Int32 minLength, Int32 maxLength, String propertyName, Type containerType, String topLevelParamName) + { + Boolean valueIsInvalid = false; + + if (propertyValue == null) + { + if (!allowNull) + { + valueIsInvalid = true; + } + } + else if ((propertyValue.Length < minLength) || (propertyValue.Length > maxLength)) + { + valueIsInvalid = true; + } + + // throw exception if the value is invalid. + if (valueIsInvalid) + { + // If the propertyValue is null, just print it like an empty string. + if (propertyValue == null) + { + propertyValue = String.Empty; + } + + if (allowNull) + { + // paramName comes second for ArgumentException. + throw new ArgumentException(CommonResources.InvalidStringPropertyValueNullAllowed(propertyValue, propertyName, containerType.Name, minLength, maxLength), topLevelParamName); + } + else + { + throw new ArgumentException(CommonResources.InvalidStringPropertyValueNullForbidden(propertyValue, propertyName, containerType.Name, minLength, maxLength), topLevelParamName); + } + } + } + + /// + /// Verify that a propery is within the bounds of the specified range. + /// + /// The property value + /// The minimum value allowed + /// The maximum value allowed + /// The name of the property + /// The container of the property + /// The top level parameter name + public static void CheckRange(T propertyValue, T minValue, T maxValue, String propertyName, Type containerType, String topLevelParamName) + where T : IComparable + { + if (propertyValue.CompareTo(minValue) < 0 || propertyValue.CompareTo(maxValue) > 0) + { + // paramName comes first for ArgumentOutOfRangeException. 
+ throw new ArgumentOutOfRangeException(topLevelParamName, CommonResources.ValueTypeOutOfRange(propertyValue, propertyName, containerType.Name, minValue, maxValue)); + } + } + + private static void CheckRange(T propertyValue, T minValue, T maxValue, String propertyName, String topLevelParamName) + where T : IComparable + { + if (propertyValue.CompareTo(minValue) < 0 || propertyValue.CompareTo(maxValue) > 0) + { + // paramName comes first for ArgumentOutOfRangeException. + throw new ArgumentOutOfRangeException(topLevelParamName, CommonResources.VssPropertyValueOutOfRange(propertyName, propertyValue, minValue, maxValue)); + } + } + + /// + /// Make sure the property filter conforms to the requirements for a + /// property filter. + /// + /// + public static void ValidatePropertyFilter(String propertyNameFilter) + { + PropertyValidation.ValidatePropertyString(propertyNameFilter, c_maxPropertyNameLengthInChars, "propertyNameFilter"); + } + + // Limits on the sizes of property values + private const Int32 c_maxPropertyNameLengthInChars = 400; + private const Int32 c_maxByteValueSize = 8 * 1024 * 1024; + private const Int32 c_maxStringValueLength = 4 * 1024 * 1024; + + // Minium date time allowed for a property value. + private static readonly DateTime s_minAllowedDateTime = new DateTime(1753, 1, 1, 0, 0, 0, DateTimeKind.Utc); + + // Maximum date time allowed for a property value. + // We can't preserve DateTime.MaxValue faithfully because SQL's cut-off is 3 milliseconds lower. Also to handle UTC to Local shifts, we give ourselves a buffer of one day. + private static readonly DateTime s_maxAllowedDateTime = DateTime.SpecifyKind(DateTime.MaxValue, DateTimeKind.Utc).AddDays(-1); + + private static Double s_minNegative = Double.Parse("-1.79E+308", CultureInfo.InvariantCulture); + private static Double s_maxNegative = Double.Parse("-2.23E-308", CultureInfo.InvariantCulture); + private static Double s_minPositive = Double.Parse("2.23E-308", CultureInfo.InvariantCulture); + private static Double s_maxPositive = Double.Parse("1.79E+308", CultureInfo.InvariantCulture); + + private static readonly Dictionary s_validPropertyTypeStrings = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + //primitive types: + //(NO DBNull or Empty) + { "System.Boolean", typeof(Boolean) }, + { "System.Byte", typeof(Byte) }, + { "System.Char", typeof(Char) }, + { "System.DateTime", typeof(DateTime) }, + { "System.Decimal", typeof(Decimal) }, + { "System.Double", typeof(Double) }, + { "System.Int16", typeof(Int16) }, + { "System.Int32", typeof(Int32) }, + { "System.Int64", typeof(Int64) }, + { "System.SByte", typeof(SByte) }, + { "System.Single", typeof(Single) }, + { "System.String", typeof(String) }, + { "System.UInt16", typeof(UInt16) }, + { "System.UInt32", typeof(UInt32) }, + { "System.UInt64", typeof(UInt64) }, + + //other valid types + { "System.Byte[]", typeof(Byte[]) }, + { "System.Guid", typeof(Guid) } + }; + } +} diff --git a/src/Sdk/Common/Common/Utility/SecretUtility.cs b/src/Sdk/Common/Common/Utility/SecretUtility.cs new file mode 100644 index 00000000000..ec804836d63 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/SecretUtility.cs @@ -0,0 +1,253 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace GitHub.Services.Common +{ + /// + /// Utility for masking common secret patterns + /// + [EditorBrowsable(EditorBrowsableState.Never)] + 
public static class SecretUtility + { + /// + /// The string to use to replace secrets when throwing exceptions, logging + /// or otherwise risking exposure + /// + internal const string PasswordMask = "******"; + + /// + /// The string used to mask newer secrets + /// + internal const string SecretMask = ""; + + //We use a different mask per token, to help track down suspicious mask sequences in error + // strings that shouldn't obviously be masked + // Internal for testing, please don't reuse + internal const string PasswordRemovedMask = "**password-removed**"; + internal const string PwdRemovedMask = "**pwd-removed**"; + internal const string PasswordSpaceRemovedMask = "**password-space-removed**"; + internal const string PwdSpaceRemovedMask = "**pwd-space-removed**"; + internal const string AccountKeyRemovedMask = "**account-key-removed**"; + + + /// + /// Whether this string contains an unmasked secret + /// + /// The message to check + /// True if a secret this class supports was found + /// This implementation is as least as expensive as a ScrubSecrets call + public static bool ContainsUnmaskedSecret(string message) + { + return !String.Equals(message, ScrubSecrets(message, false), StringComparison.Ordinal); + } + + + /// + /// Whether this string contains an unmasked secret + /// + /// The message to check + /// True if a secret was found and only jwts were found + /// True if a message this class supports was found + /// This method is a temporary workaround and should be removed in M136 + /// This implementation is as least as expensive as a ScrubSecrets call + public static bool ContainsUnmaskedSecret(string message, out bool onlyJwtsFound) + { + if (string.IsNullOrEmpty(message)) + { + onlyJwtsFound = false; + return false; + } + + string scrubbedMessage = ScrubJwts(message, assertOnDetection: false); + bool jwtsFound = !String.Equals(message, scrubbedMessage, StringComparison.Ordinal); + scrubbedMessage = ScrubTraditionalSecrets(message, assertOnDetection: false); + bool secretsFound = !String.Equals(message, scrubbedMessage, StringComparison.Ordinal); + onlyJwtsFound = !secretsFound && jwtsFound; + return secretsFound || jwtsFound; + } + + /// + /// Scrub a message for any secrets(passwords, tokens) in known formats + /// This method is called to scrub exception messages and traces to prevent any secrets + /// from being leaked. + /// + /// The message to verify for secret data. + /// When true, if a message contains a + /// secret in a known format the method will debug assert. Default = true. 
+ /// The message with any detected secrets masked + /// This only does best effort pattern matching for a set of known patterns + public static string ScrubSecrets(string message, bool assertOnDetection = true) + { + if (string.IsNullOrEmpty(message)) + { + return message; + } + + message = ScrubTraditionalSecrets(message, assertOnDetection); + message = ScrubJwts(message, assertOnDetection); + return message; + } + + private static string ScrubTraditionalSecrets(string message, bool assertOnDetection) + { + message = ScrubSecret(message, c_passwordToken, PasswordRemovedMask, assertOnDetection); + message = ScrubSecret(message, c_pwdToken, PwdRemovedMask, assertOnDetection); + message = ScrubSecret(message, c_passwordTokenSpaced, PasswordSpaceRemovedMask, assertOnDetection); + message = ScrubSecret(message, c_pwdTokenSpaced, PwdSpaceRemovedMask, assertOnDetection); + message = ScrubSecret(message, c_accountKeyToken, AccountKeyRemovedMask, assertOnDetection); + + message = ScrubSecret(message, c_authBearerToken, SecretMask, assertOnDetection); + return message; + } + + private static string ScrubJwts(string message, bool assertOnDetection) + { + //JWTs are sensitive and we need to scrub them, so this is a best effort attempt to + // scrub them based on typical patterns we see + message = ScrubSecret(message, c_jwtTypToken, SecretMask, assertOnDetection, + maskToken: true); + message = ScrubSecret(message, c_jwtAlgToken, SecretMask, assertOnDetection, + maskToken: true); + message = ScrubSecret(message, c_jwtX5tToken, SecretMask, assertOnDetection, + maskToken: true); + message = ScrubSecret(message, c_jwtKidToken, SecretMask, assertOnDetection, + maskToken: true); + return message; + } + + private static string ScrubSecret(string message, string token, string mask, bool assertOnDetection, bool maskToken=false) + { + int startIndex = -1; + + do + { + startIndex = message.IndexOf(token, (startIndex < 0) ? 0 : startIndex, StringComparison.OrdinalIgnoreCase); + if (startIndex < 0) + { + // Common case, there is not a password. + break; + } + + //Explicitly check for original password mask so code that uses the orignal doesn't assert + if (!maskToken && ( + message.IndexOf(token + mask, StringComparison.OrdinalIgnoreCase) == startIndex + || (message.IndexOf(token + PasswordMask, StringComparison.OrdinalIgnoreCase) == startIndex))) + { + // The password is already masked, move past this string. + startIndex += token.Length + mask.Length; + continue; + } + + // At this point we detected a password that is not masked, remove it! + try + { + if (!maskToken) + { + startIndex += token.Length; + } + // Find the end of the password. + int endIndex = message.Length - 1; + + if (message[startIndex] == '"' || message[startIndex] == '\'') + { + // The password is wrapped in quotes. The end of the string will be the next unpaired quote. + // Unless the message itself wrapped the connection string in quotes, in which case we may mask out the rest of the message. Better to be safe than leak the connection string. + // Intentionally going to "i < message.Length - 1". If the quote isn't the second to last character, it is the last character, and we delete to the end of the string anyway. + for (int i = startIndex + 1; i < message.Length - 1; i++) + { + if (message[startIndex] == message[i]) + { + if (message[startIndex] == message[i + 1]) + { + // we found a pair of quotes. Skip over the pair and continue. + i++; + continue; + } + else + { + // this is a single quote, and the end of the password. 
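+                                // Illustrative example: for Password="se""cret"; the doubled "" above is skipped as an
+                                // escaped quote, and this single closing quote ends the secret, so the masked result is
+                                // Password=**password-removed**; (the quotes and everything between them are replaced).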
+ endIndex = i; + break; + } + } + } + } + else + { + // The password is not wrapped in quotes. + // The end is any whitespace, semi-colon, single, or double quote character. + for (int i = startIndex + 1; i < message.Length; i++) + { + if (Char.IsWhiteSpace(message[i]) || ((IList)s_validPasswordEnding).Contains(message[i])) + { + endIndex = i - 1; + break; + } + } + } + + message = message.Substring(0, startIndex) + mask + message.Substring(endIndex + 1); + + // Bug 94478: We need to scrub the message before Assert, otherwise we will fall into + // a recursive assert where the TeamFoundationServerException contains same message + if (assertOnDetection) + { + Debug.Assert(false, String.Format(CultureInfo.InvariantCulture, "Message contains an unmasked secret. Message: {0}", message)); + } + + // Trace raw that we have scrubbed a message. + //FUTURE: We need a work item to add Tracing to the VSS Client assembly. + //TraceLevel traceLevel = assertOnDetection ? TraceLevel.Error : TraceLevel.Info; + //TeamFoundationTracingService.TraceRaw(99230, traceLevel, s_area, s_layer, "An unmasked password was detected in a message. MESSAGE: {0}. STACK TRACE: {1}", message, Environment.StackTrace); + } + catch (Exception /*exception*/) + { + // With an exception here the message may still contain an unmasked password. + // We also do not want to interupt the current thread with this exception, because it may be constucting a message + // for a different exception. Trace this exception and continue on using a generic exception message. + //TeamFoundationTracingService.TraceExceptionRaw(99231, s_area, s_layer, exception); + } + finally + { + // Iterate to the next password (if it exists) + startIndex += mask.Length; + } + } while (startIndex < message.Length); + + return message; + } + + private const string c_passwordToken = "Password="; + private const string c_passwordTokenSpaced = "-Password "; + private const string c_pwdToken = "Pwd="; + private const string c_pwdTokenSpaced = "-Pwd "; + private const string c_accountKeyToken = "AccountKey="; + private const string c_authBearerToken = "Bearer "; + /// + /// {"typ":" // eyJ0eXAiOi + /// + private const string c_jwtTypToken = "eyJ0eXAiOi"; + /// + /// {"alg":" // eyJhbGciOi + /// + private const string c_jwtAlgToken = "eyJhbGciOi"; + /// + /// {"x5t":" // eyJ4NXQiOi + /// + private const string c_jwtX5tToken = "eyJ4NXQiOi"; + /// + /// {"kid":" // eyJraWQiOi + /// + private const string c_jwtKidToken = "eyJraWQiOi"; + + + + private static readonly char[] s_validPasswordEnding = new char[] { ';', '\'', '"' }; + } +} diff --git a/src/Sdk/Common/Common/Utility/SecureCompare.cs b/src/Sdk/Common/Common/Utility/SecureCompare.cs new file mode 100644 index 00000000000..90e896bf6a3 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/SecureCompare.cs @@ -0,0 +1,49 @@ +using System; +using System.ComponentModel; + +namespace GitHub.Services.Common +{ + public static class SecureCompare + { + /// + /// Compare two byte arrays for byte-by-byte equality. + /// If both arrays are the same length, the running time of this routine will not vary with the number of equal bytes between the two. + /// + /// A byte array (non-null) + /// A byte array (non-null) + /// + /// Checking secret values using built-in equality operators is insecure. + /// Operations like `==` on strings will stop the comparison when the first unmatched character is encountered. 
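+        /// (For example, "abcd" == "abXd" can stop after the third character while "Xbcd" == "abcd" stops after the first;
+        /// this method avoids that timing difference by always XOR-ing every byte pair.)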
+ /// When checking secret values from an untrusted source that we use for authentication, we must be careful + /// not to stop the comparison early for incorrect values. + /// If we do, an attacker can send a large volume of requests and use statistical methods to infer the secret value byte-by-byte. + /// + /// This method is intended to be used with arrays of the same length -- for example, two hashes from the same SHA algorithm. + /// Comparing strings of unequal length can leak length information to an attacker. + /// + public static bool TimeInvariantEquals(byte[] lhs, byte[] rhs) + { + if (lhs.Length != rhs.Length) + { + return false; + } + + // Must use bitwise operations + // Conditional branching or short-circuiting Boolean operators would change the running time depending on the result + int result = 0; + for (int i = 0; i < lhs.Length; i++) + { + result |= lhs[i] ^ rhs[i]; + } + + return result == 0; + } + + // Hide the `Equals` method inherited from `object` + [EditorBrowsable(EditorBrowsableState.Never)] + public new static bool Equals(object lhs, object rhs) + { + throw new NotImplementedException($"This is not the secure equals method! Use `{nameof(SecureCompare.TimeInvariantEquals)}` instead."); + } + } +} diff --git a/src/Sdk/Common/Common/Utility/StreamParser.cs b/src/Sdk/Common/Common/Utility/StreamParser.cs new file mode 100644 index 00000000000..f67b0f88eff --- /dev/null +++ b/src/Sdk/Common/Common/Utility/StreamParser.cs @@ -0,0 +1,188 @@ +using System; +using System.ComponentModel; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace GitHub.Services.Common +{ + /// + /// Simple helper class used to break up a stream into smaller streams + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public class StreamParser + { + public StreamParser(Stream fileStream, int chunkSize) + { + m_stream = fileStream; + m_chunkSize = chunkSize; + } + + /// + /// Returns total length of file. 
+ /// + public long Length + { + get + { + return m_stream.Length; + } + } + + /// + /// returns the next substream + /// + /// + public SubStream GetNextStream() + { + return new SubStream(m_stream, m_chunkSize); + } + + Stream m_stream; + int m_chunkSize; + } + + /// + /// Streams a subsection of a larger stream + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public class SubStream : Stream + { + public SubStream(Stream stream, int maxStreamSize) + { + m_startingPosition = stream.Position; + long remainingStream = stream.Length - m_startingPosition; + m_length = Math.Min(maxStreamSize, remainingStream); + m_stream = stream; + } + + public override bool CanRead + { + get + { + return m_stream.CanRead && m_stream.Position <= this.EndingPostionOnOuterStream; + } + } + + public override bool CanSeek + { + get { return m_stream.CanSeek; } + } + + public override bool CanWrite + { + get { return false; } + } + + public override void Flush() + { + throw new NotImplementedException(); + } + + public override long Length + { + get + { + return m_length; + } + } + + public override long Position + { + get + { + return m_stream.Position - m_startingPosition; + } + set + { + if (value >= m_length) + { + throw new EndOfStreamException(); + } + + m_stream.Position = m_startingPosition + value; + } + } + + /// + /// Postion in larger stream where this substream starts + /// + public long StartingPostionOnOuterStream + { + get + { + return m_startingPosition; + } + } + + /// + /// Postion in larger stream where this substream ends + /// + public long EndingPostionOnOuterStream + { + get + { + return m_startingPosition + m_length - 1; + } + } + + public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + // check that the read is only in our substream + count = EnsureLessThanOrEqualToRemainingBytes(count); + + return m_stream.ReadAsync(buffer, offset, count, cancellationToken); + } + + public override int Read(byte[] buffer, int offset, int count) + { + // check that the read is only in our substream + count = EnsureLessThanOrEqualToRemainingBytes(count); + + return m_stream.Read(buffer, offset, count); + } + + public override long Seek(long offset, SeekOrigin origin) + { + if (origin == SeekOrigin.Begin && 0 <= offset && offset < m_length) + { + return m_stream.Seek(offset + m_startingPosition, origin); + } + else if (origin == SeekOrigin.End && 0 >= offset && offset > -m_length) + { + return m_stream.Seek(offset - ((m_stream.Length-1) - this.EndingPostionOnOuterStream), origin); + } + else if (origin == SeekOrigin.Current && (offset + m_stream.Position) >= this.StartingPostionOnOuterStream && (offset + m_stream.Position) < this.EndingPostionOnOuterStream) + { + return m_stream.Seek(offset, origin); + } + + throw new ArgumentException(); + } + + public override void SetLength(long value) + { + throw new NotImplementedException(); + } + + public override void Write(byte[] buffer, int offset, int count) + { + throw new NotImplementedException(); + } + + private int EnsureLessThanOrEqualToRemainingBytes(int numBytes) + { + long remainingBytesInStream = m_length - this.Position; + if (numBytes > remainingBytesInStream) + { + numBytes = Convert.ToInt32(remainingBytesInStream); + } + return numBytes; + } + + private long m_length; + private long m_startingPosition; + private Stream m_stream; + } + +} diff --git a/src/Sdk/Common/Common/Utility/TypeExtensionMethods.cs b/src/Sdk/Common/Common/Utility/TypeExtensionMethods.cs new file mode 100644 index 
00000000000..f1c68fbdf01 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/TypeExtensionMethods.cs @@ -0,0 +1,294 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Reflection; + +namespace GitHub.Services.Common +{ + public static class TypeExtensionMethods + { + /// + /// Determins if a value is assignable to the requested type. It goes + /// the extra step beyond IsAssignableFrom in that it also checks for + /// IConvertible and attempts to convert the value. + /// + /// + /// + /// + public static bool IsAssignableOrConvertibleFrom(this Type type, object value) + { + if (value == null) + { + return false; + } + + if (!type.GetTypeInfo().IsAssignableFrom(value.GetType().GetTypeInfo())) + { + if (value is IConvertible) + { + // Try and convert to the requested type, if successful + // assign value to the result so we don't have to do again. + try + { + ConvertUtility.ChangeType(value, type, CultureInfo.CurrentCulture); + return true; + } + catch (FormatException) + { + } + catch (InvalidCastException) + { + } + catch (OverflowException) + { + } + } + return false; + } + + return true; + } + + /// + /// Determines if the type is of the type t. + /// + /// The type to check. + /// The type to compare to. + /// True if of the same type, otherwise false. + public static bool IsOfType(this Type type, Type t) + { + if (t.GetTypeInfo().IsAssignableFrom(type.GetTypeInfo())) + { + return true; + } + else if (type.GetTypeInfo().IsGenericType && + type.GetGenericTypeDefinition() == t) + { + //generic type + return true; + } + else if (type.GetTypeInfo().ImplementedInterfaces.Any( + i => i.GetTypeInfo().IsGenericType && + i.GetGenericTypeDefinition() == t)) + { + //implements generic type + return true; + } + + return false; + } + + + /// + /// Determines if the type is a Dictionary. + /// + /// The type to check. + /// True if a dictionary, otherwise false. + public static bool IsDictionary(this Type type) + { + if (typeof(IDictionary).GetTypeInfo().IsAssignableFrom(type.GetTypeInfo())) + { + //non-generic dictionary + return true; + } + else if (type.GetTypeInfo().IsGenericType && + type.GetGenericTypeDefinition() == typeof(IDictionary<,>)) + { + //generic dictionary interface + return true; + } + else if (type.GetTypeInfo().ImplementedInterfaces.Any( + i => i.GetTypeInfo().IsGenericType && + i.GetGenericTypeDefinition() == typeof(IDictionary<,>))) + { + //implements generic dictionary + return true; + } + + return false; + } + + /// + /// Determines if the type is a List. + /// + /// The type to check. + /// True if a list, otherwise false. + public static bool IsList(this Type type) + { + if (typeof(IList).GetTypeInfo().IsAssignableFrom(type.GetTypeInfo())) + { + //non-generic list + return true; + } + else if (type.GetTypeInfo().IsGenericType && + type.GetGenericTypeDefinition() == typeof(IList<>)) + { + //generic list interface + return true; + } + else if (type.GetTypeInfo().ImplementedInterfaces.Any( + i => i.GetTypeInfo().IsGenericType && + i.GetGenericTypeDefinition() == typeof(IList<>))) + { + //implements generic list + return true; + } + + return false; + } + + /// + /// Get's the type of the field/property specified. + /// + /// The type to get the field/property from. + /// The name of the field/property. + /// The type of the field/property or null if no match found. 
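+        /// Note: the member lookup is case-insensitive and walks base types (see
+        /// GetPublicInstancePropertyInfo and GetPublicInstanceFieldInfo at the bottom of this class).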
+ public static Type GetMemberType(this Type type, string name) + { + TypeInfo typeInfo = type.GetTypeInfo(); + PropertyInfo propertyInfo = GetPublicInstancePropertyInfo(type, name); + if (propertyInfo != null) + { + return propertyInfo.PropertyType; + } + else + { + FieldInfo fieldInfo = GetPublicInstanceFieldInfo(type, name); + if (fieldInfo != null) + { + return fieldInfo.FieldType; + } + } + return null; + } + + /// + /// Get's the value of the field/property specified. + /// + /// The type to get the field/property from. + /// The name of the field/property. + /// The object to get the value from. + /// The value of the field/property or null if no match found. + public static object GetMemberValue(this Type type, string name, object obj) + { + PropertyInfo propertyInfo = GetPublicInstancePropertyInfo(type, name); + if (propertyInfo != null) + { + return propertyInfo.GetValue(obj); + } + else + { + FieldInfo fieldInfo = GetPublicInstanceFieldInfo(type, name); + if (fieldInfo != null) + { + return fieldInfo.GetValue(obj); + } + } + return null; + } + + /// + /// Set's the value of the field/property specified. + /// + /// The type to get the field/property from. + /// The name of the field/property. + /// The object to set the value to. + /// The value to set. + public static void SetMemberValue(this Type type, string name, object obj, object value) + { + PropertyInfo propertyInfo = GetPublicInstancePropertyInfo(type, name); + if (propertyInfo != null) + { + if (!propertyInfo.SetMethod.IsPublic) + { + // this is here to match original behaviour before we switched to PCL version of code. + throw new ArgumentException("Property set method not public."); + } + propertyInfo.SetValue(obj, value); + } + else + { + FieldInfo fieldInfo = GetPublicInstanceFieldInfo(type, name); + if (fieldInfo != null) + { + fieldInfo.SetValue(obj, value); + } + } + } + +#if NETSTANDARD + /// + /// Portable compliant way to get a constructor with specified arguments. This will return a constructor that is public or private as long as the arguments match. NULL will be returned if there is no match. + /// Note that it will pick the first one it finds that matches, which is not necesarily the best match. + /// + /// The Type that has the constructor + /// The type of the arguments for the constructor. + /// + public static ConstructorInfo GetFirstMatchingConstructor(this Type type, params Type[] parameterTypes) + { + return type.GetTypeInfo().DeclaredConstructors.GetFirstMatchingConstructor(parameterTypes); + } + + /// + /// Portable compliant way to get a constructor with specified arguments from a prefiltered list. This will return a constructor that is public or private as long as the arguments match. NULL will be returned if there is no match. + /// Note that it will pick the first one it finds that matches, which is not necesarily the best match. + /// + /// Prefiltered list of constructors + /// The type of the arguments for the constructor. 
+ /// + public static ConstructorInfo GetFirstMatchingConstructor(this IEnumerable constructors, params Type[] parameterTypes) + { + foreach (ConstructorInfo constructorInfo in constructors) + { + ParameterInfo[] parameters = constructorInfo.GetParameters(); + if (parameters.Length == parameterTypes.Length) + { + int i; + bool matches = true; + for (i = 0; i < parameterTypes.Length; i++) + { + if (parameters[i].ParameterType != parameterTypes[i] && !parameters[i].ParameterType.GetTypeInfo().IsAssignableFrom(parameterTypes[i].GetTypeInfo())) + { + matches = false; + break; + } + } + if (matches) + { + return constructorInfo; + } + } + } + return null; + } +#endif + + private static PropertyInfo GetPublicInstancePropertyInfo(Type type, string name) + { + Type typeToCheck = type; + PropertyInfo propertyInfo = null; + while (propertyInfo == null && typeToCheck != null) + { + TypeInfo typeInfo = typeToCheck.GetTypeInfo(); + propertyInfo = typeInfo.DeclaredProperties.FirstOrDefault(p => p.Name.Equals(name, StringComparison.OrdinalIgnoreCase) && p.GetMethod.Attributes.HasFlag(MethodAttributes.Public) && !p.GetMethod.Attributes.HasFlag(MethodAttributes.Static)); + typeToCheck = typeInfo.BaseType; + } + return propertyInfo; + } + + private static FieldInfo GetPublicInstanceFieldInfo(Type type, string name) + { + Type typeToCheck = type; + FieldInfo fieldInfo = null; + while (fieldInfo == null && typeToCheck != null) + { + TypeInfo typeInfo = typeToCheck.GetTypeInfo(); + fieldInfo = typeInfo.DeclaredFields.FirstOrDefault(f => f.Name.Equals(name, StringComparison.OrdinalIgnoreCase) && f.IsPublic && !f.IsStatic); + typeToCheck = typeInfo.BaseType; + } + return fieldInfo; + } + } +} diff --git a/src/Sdk/Common/Common/Utility/UriExtensions.cs b/src/Sdk/Common/Common/Utility/UriExtensions.cs new file mode 100644 index 00000000000..e25d8f0c8d6 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/UriExtensions.cs @@ -0,0 +1,160 @@ +using System; +using System.Collections.Generic; +using System.Collections.Specialized; +using System.Linq; +using System.Text; + +namespace GitHub.Services.Common +{ + public static class UriExtensions + { + public static Uri AppendQuery(this Uri uri, String name, String value) + { + ArgumentUtility.CheckForNull(uri, "uri"); + ArgumentUtility.CheckStringForNullOrEmpty(name, "name"); + ArgumentUtility.CheckStringForNullOrEmpty(value, "value"); + + StringBuilder stringBuilder = new StringBuilder(uri.Query.TrimStart('?')); + + AppendSingleQueryValue(stringBuilder, name, value); + + UriBuilder uriBuilder = new UriBuilder(uri); + + uriBuilder.Query = stringBuilder.ToString(); + + return uriBuilder.Uri; + } + + public static Uri AppendQuery(this Uri uri, IEnumerable> queryValues) + { + ArgumentUtility.CheckForNull(uri, "uri"); + ArgumentUtility.CheckForNull(queryValues, "queryValues"); + + StringBuilder stringBuilder = new StringBuilder(uri.Query.TrimStart('?')); + + foreach (KeyValuePair kvp in queryValues) + { + AppendSingleQueryValue(stringBuilder, kvp.Key, kvp.Value); + } + + UriBuilder uriBuilder = new UriBuilder(uri); + uriBuilder.Query = stringBuilder.ToString(); + return uriBuilder.Uri; + } + + public static Uri AppendQuery(this Uri uri, NameValueCollection queryValues) + { + ArgumentUtility.CheckForNull(uri, "uri"); + ArgumentUtility.CheckForNull(queryValues, "queryValues"); + + StringBuilder stringBuilder = new StringBuilder(uri.Query.TrimStart('?')); + + foreach (String name in queryValues) + { + AppendSingleQueryValue(stringBuilder, name, queryValues[name]); + } + + 
UriBuilder uriBuilder = new UriBuilder(uri); + + uriBuilder.Query = stringBuilder.ToString(); + + return uriBuilder.Uri; + } + + /// + /// Performs an Add similar to the NameValuCollection 'Add' method where the value gets added as an item in a comma delimited list if the key is already present. + /// + /// + /// + /// + /// + /// + public static void Add(this IList> collection, String key, T value, Func convert = null) + { + collection.AddMultiple(key, new List { value }, convert); + } + + public static void AddMultiple(this IList> collection, String key, IEnumerable values, Func convert) + { + ArgumentUtility.CheckForNull(collection, "collection"); + ArgumentUtility.CheckStringForNullOrEmpty(key, "name"); + + if (convert == null) convert = (val) => val.ToString(); + + if (values != null && values.Any()) + { + StringBuilder newValue = new StringBuilder(); + KeyValuePair matchingKvp = collection.FirstOrDefault(kvp => kvp.Key.Equals(key)); + if (matchingKvp.Key == key) + { + collection.Remove(matchingKvp); + newValue.Append(matchingKvp.Value); + } + + foreach (var value in values) + { + if (newValue.Length > 0) + { + newValue.Append(","); + } + newValue.Append(convert(value)); + } + + collection.Add(new KeyValuePair(key, newValue.ToString())); + } + } + + public static void Add(this IList> collection, String key, String value) + { + collection.AddMultiple(key, new[] { value }); + } + + public static void AddMultiple(this IList> collection, String key, IEnumerable values) + { + collection.AddMultiple(key, values, (val) => val); + } + + public static void AddMultiple(this NameValueCollection collection, String name, IEnumerable values, Func convert) + { + ArgumentUtility.CheckForNull(collection, "collection"); + ArgumentUtility.CheckStringForNullOrEmpty(name, "name"); + + if (convert == null) convert = (val) => val.ToString(); + + if (values != null) + { + foreach (var value in values) + { + collection.Add(name, convert(value)); + } + } + } + + public static void AddMultiple(this NameValueCollection collection, String name, IEnumerable values) + { + ArgumentUtility.CheckForNull(collection, "collection"); + collection.AddMultiple(name, values, (val) => val); + } + + /// + /// Get the absolute path of the given Uri, if it is absolute. + /// + /// If the URI is absolute, the string form of it is returned; otherwise, + /// the URI's string representation. + public static string AbsoluteUri(this Uri uri) + { + return uri.IsAbsoluteUri ? uri.AbsoluteUri : uri.ToString(); + } + + private static void AppendSingleQueryValue(StringBuilder builder, String name, String value) + { + if (builder.Length > 0) + { + builder.Append("&"); + } + builder.Append(Uri.EscapeDataString(name)); + builder.Append("="); + builder.Append(Uri.EscapeDataString(value)); + } + } +} diff --git a/src/Sdk/Common/Common/Utility/UriUtility.cs b/src/Sdk/Common/Common/Utility/UriUtility.cs new file mode 100644 index 00000000000..875fdc06537 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/UriUtility.cs @@ -0,0 +1,2204 @@ +// This #define is used by the HtmlEncode and HtmlDecode logic below. The original comment +// from ndp/fx/src/net/system/net/webutility.cs follows. 
+#define ENTITY_ENCODE_HIGH_ASCII_CHARS + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Collections.Specialized; +using System.ComponentModel; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Globalization; +using System.IO; +using System.Net; +using System.Runtime.Serialization; +using System.Text; +using System.Text.RegularExpressions; +using System.Net.Sockets; + +namespace GitHub.Services.Common.Internal +{ + /// + /// Utility class for general Uri actions. See LinkingUtilities for artifact uri specific methods. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public static class UriUtility + { + private const String c_uriSchemeHttp = "http"; + private const String c_uriSchemeHttps = "https"; + + /// + /// List of URI schemes considered to be UNSAFE. When a URL with this is scheme is navigated to, we will + /// inform the user that it is unsafe and prevent navigation. + /// + /// + /// Note: the data scheme is not in this list nor is it in the safe list either, this is by design. + /// See http://msdn.microsoft.com/en-us/library/ie/cc848897(v=vs.85).aspx for a reference of the data protocol. + /// See http://www.gnucitizen.org/blog/bugs-in-the-browser-firefoxs-data-url-scheme-vulnerability/ for attacks on the data protocol. + /// + private static readonly ICollection UnsafeUriSchemeList = new HashSet(new string[] { + "javascript", "vbscript" + }, VssStringComparer.UriScheme); + + /// + /// List of URI schemes considered to be SAFE. When a URL with this scheme is navigated to (e.g. in WIT client or + /// web access), we will not warn the user about it as they are trusted. For any other URL not in this list, we will + /// warn the user about it! + /// + /// + /// "x-mvwit" is here as it is used and saved to work item history rich HTML changes. It is used to reference other work + /// items in the list. If we remove it from this list, it will not be a navigatable link in e.g. Web Access. + /// + private static readonly ICollection SafeUriSchemeList = new HashSet(new string[] { + "http", "https", "ftp", "gopher", "mailto", "news", "telnet", "wais", + "vstfs", "tfs", "alm", "mtm", "mtms", "mfbclient", "mfbclients", + "x-mvwit" + }, VssStringComparer.UriScheme); + + private const char PathSeparatorChar = '/'; + private const string PathSeparator = "/"; + + private class _AbsoluteUriStringComparer : IEqualityComparer + { + public bool Equals(Uri x, Uri y) + { + string xAbsoluteUri = (x != null) ? GetInvariantAbsoluteUri(x) : null; + string yAbsoluteUri = (y != null) ? GetInvariantAbsoluteUri(y) : null; + + return VssStringComparer.Url.Equals(xAbsoluteUri, yAbsoluteUri); + } + + public int GetHashCode(Uri obj) + { + return GetInvariantAbsoluteUri(obj).GetHashCode(); + } + } + + private class _UrlPathIgnoreSeparatorsComparer : IEqualityComparer + { + public bool Equals(string x, string y) + { + return VssStringComparer.UrlPath.Equals(TrimPathSeparators(x), TrimPathSeparators(y)); + } + + public int GetHashCode(string obj) + { + return VssStringComparer.UrlPath.GetHashCode(TrimPathSeparators(obj)); + } + } + + /// + /// string comparer for uri, is not case sensitive and does not care about trailing '/' + /// + public static IEqualityComparer AbsoluteUriStringComparer = new _AbsoluteUriStringComparer(); + + /// + /// Compares URL Paths ignoring any starting or ending path separators. 
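+        /// For example, "/tfs/DefaultCollection/" and "tfs/DefaultCollection" compare equal under this comparer.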
+ /// + public static IEqualityComparer UrlPathIgnoreSeparatorsComparer = new _UrlPathIgnoreSeparatorsComparer(); + + /// + /// A URL is considered unsafe IF: + /// * It is not an absolute URI (e.g. a relative file.html) + /// * It's scheme is part of the unsafe scheme list. + /// * It is a file:// URI pointing to a local file, e.g. file://C:\Windows\System32\notepad.exe + /// + public static bool IsUriUnsafe(Uri uri) + { + ArgumentUtility.CheckForNull(uri, "uri"); + + return !uri.IsAbsoluteUri || UnsafeUriSchemeList.Contains(uri.Scheme) + || IsUriLocalFile(uri); // Also exclude all file URLs pointing to local files + } + + /// + /// Checks if a URL is considered safe. Users will not e.g. be prompted with a warning when navigating + /// to these URLs. + /// + /// A url. + /// + /// A URL is approved IF: + /// * It is an absolute URI. + /// * It's scheme is part of the safe scheme list. + /// * It is NOT a file:// URI pointing to a local file, e.g. file://C:\Windows\System32\notepad.exe + /// + public static bool IsUriSafe(Uri uri) + { + ArgumentUtility.CheckForNull(uri, "uri"); + return uri.IsAbsoluteUri && SafeUriSchemeList.Contains(uri.Scheme) + && !IsUriLocalFile(uri); // Also exclude all file URLs pointing to local files + } + + /// + /// Checks if a URL is pointing to a local file (not on a network share or host), based on the presence of a host/authority in the URL. + /// It attempts to do no comparison based on the host name. + /// + /// A url. + /// true if the URL points to a file on the local computer. + public static bool IsUriLocalFile(Uri uri) + { + ArgumentUtility.CheckForNull(uri, "uri"); + return uri.IsAbsoluteUri && uri.IsFile && !uri.IsUnc; + } + + /// + /// returns the absolute Uri but in a consistent way such that the presence of a trailing slash doesnt affect the returned string, + /// also converts the uri to lowerInvariant + /// + /// Uri, cannot be null, must be Absolute + /// + public static string GetInvariantAbsoluteUri(Uri uri) + { + //examples (Uri -> return value): + // new Uri("http://server/tfs/")-> "http://server/tfs" + // new Uri("http://server/tfs") -> "http://server/tfs" + // new Uri("http://server/") -> "http://server/" + // new Uri("http://server") -> "http://server/" + + Debug.Assert(uri != null); + Debug.Assert(uri.IsAbsoluteUri); + + string absoluteUri = uri.AbsoluteUri; + + if (uri.Segments.Length > 1) + { + absoluteUri = TrimEndingPathSeparator(absoluteUri); + } + return absoluteUri.ToLowerInvariant(); + } + + /// + /// Joins a URI and a relativePath with a single forward slash. Duplicate slashes at the junction point are removed. + /// + /// The base Uri. Must be an absolute Uri. The last segment of the Uri path (if any) is considered a virtual directory and not removed, even if it does not have a trailing forward slash. + /// The relative path to append to the Uri. + /// If true, any leading forward slashes on the relative path argument are discarded. + /// The base Uri with the relativePath appended to it. + /// + /// This is intended to be an alternative the Uri constructor, which can remove several path segments from your arguments. 
For example: + /// + /// new Uri(new Uri("http://localhost/abc/efg/"), "/Hello/World") returns http://localhost/Hello/World ("/abc/efg/" removed due to absolute path argument) + /// new Uri(new Uri("http://localhost/dir1/dir2"), "hi.txt") returns http://localhost/dir1/hi.txt ("dir2" removed due to lack of trailing slash) + /// + public static Uri Combine(string baseUri, String relativePath, Boolean treatAbsolutePathAsRelative) + { + Uri uri = new Uri(baseUri); + Debug.Assert(uri.IsAbsoluteUri); + + return Combine(uri, relativePath, treatAbsolutePathAsRelative); + } + + /// + /// Joins a URI and a relativePath with a single forward slash. Duplicate slashes at the junction point are removed. + /// + /// The base Uri. Must be an absolute Uri. The last segment of the Uri path (if any) is considered a virtual directory and not removed, even if it does not have a trailing forward slash. + /// The relative path to append to the Uri. + /// If true, any leading forward slashes on the relative path argument are discarded. + /// The base Uri with the relativePath appended to it. + /// + /// This is intended to be an alternative the Uri constructor, which can remove several path segments from your arguments. For example: + /// + /// new Uri(new Uri("http://localhost/abc/efg/"), "/Hello/World") returns http://localhost/Hello/World ("/abc/efg/" removed due to absolute path argument) + /// new Uri(new Uri("http://localhost/dir1/dir2"), "hi.txt") returns http://localhost/dir1/hi.txt ("dir2" removed due to lack of trailing slash) + /// + public static Uri Combine(Uri baseUri, String relativePath, Boolean treatAbsolutePathAsRelative) + { + if (baseUri == null) + { + throw new ArgumentNullException("baseUri"); + } + + if (relativePath == null) + { + throw new ArgumentNullException("relativePath"); + } + + UriBuilder uriBuilder = new UriBuilder(baseUri); + + Char[] forwardSlash = new Char[] { '/' }; + + // The base Uri might have several trailing slashes, so lets get rid of all of them. + uriBuilder.Path = uriBuilder.Path.TrimEnd(forwardSlash); + + // Now add a single trailing slash to the baseUri to ensure its last path segment is interpreted as a directory instead of a file. + uriBuilder.Path = UriUtility.AppendSlashToPathIfNeeded(uriBuilder.Path); + + if (VssStringComparer.Url.StartsWith(relativePath, "/")) + { + if (treatAbsolutePathAsRelative) + { + // remove any leading slashes + relativePath = relativePath.TrimStart(forwardSlash); + } + else + { + throw new ArgumentException(CommonResources.AbsoluteVirtualPathNotAllowed(relativePath), "relativePath"); + } + } + + CheckRelativePath(relativePath); + + Uri relativeUri = new Uri(relativePath, UriKind.Relative); + + return new Uri(uriBuilder.Uri, relativeUri); + } + + public static bool Equals(Uri uri1, Uri uri2) + { + return AbsoluteUriStringComparer.Equals(uri1, uri2); + } + + /// + /// Combine two paths using "/" instead of "\" like Path.Combine does + /// + /// the first part of the path + /// the second part of the path + /// combined path + public static string CombinePath(string part1, string part2) + { + char[] slash = new char[] { '/' }; + + if (string.IsNullOrEmpty(part1)) + { + return part2; + } + + if (string.IsNullOrEmpty(part2)) + { + return part1; + } + + return string.Format(System.Globalization.CultureInfo.InvariantCulture, "{0}/{1}", part1.TrimEnd(slash), part2.TrimStart(slash)); + } + + /// + /// Returns 'true' if the specified uri is http or https, 'false' otherwise. 
+ /// + public static bool IsUriHttp(Uri uri) + { + ArgumentUtility.CheckForNull(uri, "uri"); + + return uri.IsAbsoluteUri && (uri.Scheme == c_uriSchemeHttp || uri.Scheme == c_uriSchemeHttps); + } + + /// + /// Check that the Uri has http or https as its scheme. We don't want anyone pointing at file://virus.exe. + /// + /// Uri to be checked. + public static void CheckUriIsHttp(Uri uri) + { + CheckUriIsHttp(uri, true); + } + + /// + /// Check that the Uri has http or https as its scheme. We don't want anyone pointing at file://virus.exe. + /// + /// Uri to be checked. + /// Allow sub path and query string if true. If false, the URL must be authority only. + public static void CheckUriIsHttp(Uri uri, bool allowPathAndQuery) + { + if (uri != null) + { + if (!IsUriHttp(uri)) + { + //It isn't an http/https location so we disallow it. + throw new VssServiceException(CommonResources.UriUtility_UriNotAllowed(uri.AbsoluteUri)); + } + + if (allowPathAndQuery == false && uri.PathAndQuery.Trim(new char[] { '/' }).Length > 0) + { + throw new VssServiceException(CommonResources.UriUtility_MustBeAuthorityOnlyUri(uri, uri.GetLeftPart(UriPartial.Authority))); + } + } + } + + /// + /// Check that a given Uri is an absolute Uri. Also, ensure its is http or https. + /// + /// Uri to be checked. + public static void CheckUriIsAbsoluteAndHttp(Uri uri) + { + CheckUriIsAbsoluteAndHttp(uri, true); + } + + /// + /// Check that a given Uri is an absolute Uri. Also, ensure its is http or https. + /// + /// Uri to be checked. + /// Allow sub path and query string if true. If false, the URL must be authority only. + public static void CheckUriIsAbsoluteAndHttp(Uri uri, bool allowPathAndQuery) + { + if (uri != null) + { + if (uri.IsAbsoluteUri == false) + { + throw new VssServiceException(CommonResources.UriUtility_AbsoluteUriRequired(uri.OriginalString)); + } + CheckUriIsHttp(uri, allowPathAndQuery); + } + } + + /// + /// Check the relative path to ensure it is valid. + /// + /// + public static void CheckRelativePath(string relativePath) + { + if (string.IsNullOrEmpty(relativePath) == false) + { + try + { + relativePath = relativePath.Replace("\\", "/"); + + Uri temp = new Uri(relativePath, UriKind.RelativeOrAbsolute); + if (temp.IsAbsoluteUri == true) + { + //It isn't a valid relative path so we disallow it. + throw new VssServiceException(CommonResources.UriUtility_RelativePathInvalid(relativePath)); + } + } + + catch (Exception) + { + //It isn't a valid relative path so we disallow it. + throw new VssServiceException(CommonResources.UriUtility_RelativePathInvalid(relativePath)); + } + } + } + + /// + /// Given an HTTP URI, returns the UNC URI as known to the Windows Client WebDav Redirector. + /// The UNC path is in the following form: + /// \\server[@SSL][@port][\path] + /// This function is equivalent to the native API by the same name on Vista/2008+, but provided here for compatibility with XP/2003. + /// + /// Well-formed http or https path. Alternate ports are supported. 
+        /// UNC path
+        public static string GetDavUncFromHttpPath(string httppath)
+        {
+            Uri uri = new Uri(httppath, UriKind.Absolute);
+            CheckUriIsHttp(uri);
+
+            System.Text.StringBuilder sb = new System.Text.StringBuilder();
+            sb.Append(System.IO.Path.DirectorySeparatorChar);
+            sb.Append(System.IO.Path.DirectorySeparatorChar);
+            sb.Append(uri.Host);
+
+            if (uri.Scheme == Uri.UriSchemeHttps)
+            {
+                sb.Append("@SSL");
+            }
+
+            if (!uri.IsDefaultPort)
+            {
+                sb.Append("@");
+                sb.Append(uri.Port);
+            }
+
+            sb.Append(System.IO.Path.DirectorySeparatorChar);
+
+            // Add "DavWWWRoot" to be explicit (this is supported by redirector on all OS versions):
+            //   + resistant to conflicts and ordering of UNC providers.
+            //   + better performance on Vista (after KB945435 hotfix applied) since it goes directly
+            //     to WebDAV redirector instead of trying each UNC provider which may timeout due to
+            //     certain routers configured to drop packets on SMB ports (139, 445).
+            //   - confusing to user => avoid displaying UNC path, cuz user may think there's
+            //     supposed to be a "DavWWWRoot" directory when it's just a client name-mangling.
+            sb.Append("DavWWWRoot"); // don't localize!
+            sb.Append(System.IO.Path.DirectorySeparatorChar);
+
+            string specpath = uri.GetComponents(UriComponents.Path, UriFormat.Unescaped);
+            string normpath = specpath.Replace('/', System.IO.Path.DirectorySeparatorChar);
+            sb.Append(normpath);
+
+            return sb.ToString();
+        }
+
+        /// <summary>
+        /// Given an input path that might correspond to a UNC path known to the Windows Client WebDav Redirector,
+        /// returns the converted UNC path to an HTTP (or HTTPS) url.
+        /// </summary>
+        /// <param name="uncPath"></param>
+        /// <remarks>
+        /// The UNC path is in the following form:
+        /// \\server[@SSL][@port][\path]
+        /// </remarks>
+        public static Uri TryGetHttpUriFromDavUncPath(string uncPath)
+        {
+            if (uncPath == null)
+            {
+                throw new ArgumentNullException("uncPath");
+            }
+
+            // Some valid paths for this regex
+            // \\foo\DavWWWRoot\bar\baz
+            // \\foo@8888\DavWWWRoot\bar\baz
+            // \\foo@SSL\DavWWWRoot\bar\baz
+            // \\foo@SSL@3234\DavWWWRoot\bar\baz
+            Regex regex = new Regex(@"^\\\\(?<host>[^\\|@]+)(?<ssl>@SSL)?(@(?<port>\d+))?\\DavWWWRoot\\(?<path>.+)$", RegexOptions.IgnoreCase | RegexOptions.Singleline);
+
+            Match match = regex.Match(uncPath);
+            if (match.Success)
+            {
+                Group hostGroup = match.Groups["host"];
+                Group sslGroup = match.Groups["ssl"];
+                Group portGroup = match.Groups["port"];
+                Group pathGroup = match.Groups["path"];
+
+                string scheme = (!sslGroup.Success) ? Uri.UriSchemeHttp : Uri.UriSchemeHttps;
+                string hostName = hostGroup.Value;
+                string port = (!portGroup.Success) ? "" : ":" + portGroup.Value;
+                string path = pathGroup.Value.Replace(Path.DirectorySeparatorChar, '/');
+
+                string url = String.Format(CultureInfo.InvariantCulture, "{0}://{1}{2}/{3}", scheme, hostName, port, path);
+                return new Uri(url, UriKind.Absolute);
+            }
+
+            return null;
+        }
+
+        /// <summary>
+        /// Determine if two hostnames correspond to the same machine.
+        /// </summary>
+        /// <param name="hostname1">First hostname</param>
+        /// <param name="hostname2">Second hostname</param>
+        /// <returns>True, if same machine.</returns>
+ public static bool IsSameMachine(string hostname1, string hostname2) + { + bool isSame = false; + + try + { + if (string.IsNullOrEmpty(hostname1) || string.IsNullOrEmpty(hostname2)) + { + isSame = false; + } + else if (String.Equals(hostname1, hostname2, StringComparison.OrdinalIgnoreCase)) + { + isSame = true; + } + else + { + string host1Dns = Dns.GetHostEntry(hostname1).HostName; + string host2Dns = Dns.GetHostEntry(hostname2).HostName; + isSame = string.Equals(host1Dns, host2Dns, StringComparison.OrdinalIgnoreCase); + } + } + catch (SocketException)// ex) + { + // A machine name could not be resolved, for the purposes of this method, + // assume that machines are not the same and ignore the error + + // ToDo: tedchamb come back to this + //TeamFoundationTrace.TraceException(ex); + } + + return isSame; + } + + /// + /// Returns true if supplied domain is equal to or is a sub-domain of parentDomain + /// + public static Boolean IsSubdomainOf(string domain, string parentDomain) + { + // More efficient than: 'domain.Equals(parentDomain) || domain.EndsWith("." + parentDomain)' + return domain.EndsWith(parentDomain, StringComparison.Ordinal) && + (domain.Length == parentDomain.Length || domain[domain.Length - parentDomain.Length - 1] == '.'); + } + + /// + /// Verifies that the specified uri is valid or can be made into a valid http address by prepending 'http://' to it. + /// If the uri is not valid an exception is thrown. + /// + /// + /// The validated uri, including 'http://' if it was prepended to make it valid. + public static Uri GetAbsoluteUriFromString(string uriString) + { + Uri uri = GetUriFromString(uriString); + + if (uri == null) + { + throw new VssServiceException(CommonResources.UrlNotValid()); + } + + return uri; + } + + /// + /// Creates a URI from a string. Adds http to the front if its not there. + /// Requires that the Uri scheme be http or https. + /// + /// String to convert to an absolute uri. + /// The validated uri, including 'http://' if it was prepended to make it valid, or null if the uri is not valid. + public static Uri GetUriFromString(string val) + { + Uri uri; + if (TryCreateAbsoluteUri(val, true, out uri)) + { + return uri; + } + return null; + } + + /// + /// Creates an absolute URI from a string. Adds http to the front if its not there. + /// If 'requireHttpScheme' is 'true' this method will return false if the url + /// doesn't start with http or https. + /// + /// String to convert to an absolute uri. + /// 'true' to require that the scheme is http or https, 'false' to allow any scheme. + /// + /// Either the uri or 'null' if it is not valid. + public static bool TryCreateAbsoluteUri(string val, bool requireHttpScheme, out Uri uri) + { + uri = null; + + val = val != null ? 
val.Trim() : null; + + if (string.IsNullOrEmpty(val)) + { + return false; + } + + try + { + uri = new Uri(val); + } + catch (FormatException) + { + } + + // try adding http if the uri doesn't already start with http or https + if ((uri == null || !uri.IsAbsoluteUri) && + !VssStringComparer.Url.StartsWith(val, c_uriSchemeHttp) && + !VssStringComparer.Url.StartsWith(val, c_uriSchemeHttps)) + { + try + { + val = c_uriSchemeHttp + "://" + val; + uri = new Uri(val); + } + catch (FormatException) + { + } + } + + if (uri == null) + { + return false; + } + + if (requireHttpScheme && + !VssStringComparer.Url.StartsWith(uri.Scheme, c_uriSchemeHttp) && + !VssStringComparer.Url.StartsWith(uri.Scheme, c_uriSchemeHttps)) + { + return false; + } + + if (!uri.IsAbsoluteUri) + { + uri = null; + return false; + } + + return true; + } + + /// + /// Ensures that a relative path starts with a forward slash. + /// + /// The relative path. + public static string EnsureStartsWithPathSeparator(string relativePath) + { + if (relativePath != null && !VssStringComparer.Url.StartsWith(relativePath, PathSeparator)) + { + relativePath = PathSeparator + relativePath; + } + + return relativePath; + } + + /// + /// Ensures that a relative path ends with a forward slash. + /// + /// The relative path. + public static string EnsureEndsWithPathSeparator(string relativePath) + { + if (relativePath != null && !VssStringComparer.Url.EndsWith(relativePath, PathSeparator)) + { + relativePath += PathSeparator; + } + + return relativePath; + } + + /// + /// Trims any starting slashes from the input path. + /// + /// The relative path. + public static string TrimStartingPathSeparator(string relativePath) + { + return (relativePath != null) ? relativePath.TrimStart(PathSeparatorChar) : null; + } + + /// + /// Trims any ending slashes from the input path. + /// + /// The relative path. + public static string TrimEndingPathSeparator(string relativePath) + { + return (relativePath != null) ? relativePath.TrimEnd(PathSeparatorChar) : null; + } + + /// + /// Trims any starting or ending slashes from the input path. + /// + /// The relative path. + public static string TrimPathSeparators(string relativePath) + { + return (relativePath != null) ? relativePath.Trim(PathSeparatorChar) : null; + } + + public static String AppendSlashToPathIfNeeded(string path) + { + + if (path == null) return null; + + int l = path.Length; + if (l == 0) return path; + + if (path[l - 1] != '/') + path += '/'; + + return path; + } + + /// + /// Correct URI content to remove excess(duplicate) separators in path section. + /// + public static Uri NormalizePathSeparators(Uri uri) + { + //Skip normalization when content is properly formed. + if (uri.LocalPath.Contains("//")) + { + UriBuilder builder = new UriBuilder(uri); + string path = builder.Path; + + //Remove grouped slashes - bias towards robustness over algorithmic efficiency on this corner scenario. + while (path.Contains("//")) + { + path = path.Replace("//", "/"); + } + builder.Path = path; + uri = builder.Uri; + } + return uri; + } + +#region functionality forked from System.Web.HttpUtility + //************************************************************************************************* + // This region UriUtility contains functionality forked from System.Web.HttpUtility. + // Only our server assemblies can take a dependency on System.Web because it is not part of the + // .NET Framework "Client Profile". 
Client and common code that needs to use helper functions + // such as UrlEncode and UrlDecode from System.Web.HttpUtility must call the methods on this class + // instead to avoid a dependency on System.Web. + // + // Copyright (c) Microsoft Corporation. All rights reserved. + //************************************************************************************************* + + // Don't entity encode high chars (160 to 256), to fix bugs VSWhidbey 85857/111927 + // REVIEW: comment out this line to fix VSWhidbey 85857/111927, after we verify that it's safe to do so + +#region ParseFragmentString + + public static NameValueCollection ParseFragmentString(string fragment) + { + return ParseFragmentString(fragment, Encoding.UTF8); + } + + public static NameValueCollection ParseFragmentString(string fragment, Encoding encoding) + { + return ParseFragmentString(fragment, encoding, true); + } + + public static NameValueCollection ParseFragmentString(string fragment, Encoding encoding, Boolean urlEncoded) + { + ArgumentUtility.CheckForNull(fragment, "fragment"); + ArgumentUtility.CheckForNull(encoding, "encoding"); + + if (fragment.Length > 0 && fragment[0] == '#') + { + fragment = fragment.Substring(1); + } + + return new HttpValueCollection(fragment, false, urlEncoded, encoding); + } + +#endregion + +#region ParseQueryString + + // *** Source: ndp/fx/src/xsp/system/web/httpserverutility.cs + + public static NameValueCollection ParseQueryString(string query) + { + return ParseQueryString(query, Encoding.UTF8); + } + + public static NameValueCollection ParseQueryString(string query, Encoding encoding) + { + return ParseQueryString(query, encoding, true); + } + + public static NameValueCollection ParseQueryString(string query, Encoding encoding, Boolean urlEncoded) + { + ArgumentUtility.CheckForNull(query, "query"); + ArgumentUtility.CheckForNull(encoding, "encoding"); + + if (query.Length > 0 && query[0] == '?') + { + query = query.Substring(1); + } + + return new HttpValueCollection(query, false, urlEncoded, encoding); + } + +#endregion + +#region UrlEncode implementation + + // *** Source: ndp/fx/src/xsp/system/web/util/httpencoder.cs + + private static byte[] UrlEncode(byte[] bytes, int offset, int count, bool alwaysCreateNewReturnValue) + { + byte[] encoded = UrlEncode(bytes, offset, count); + + return (alwaysCreateNewReturnValue && (encoded != null) && (encoded == bytes)) + ? (byte[])encoded.Clone() + : encoded; + } + + private static byte[] UrlEncode(byte[] bytes, int offset, int count) + { + if (!ValidateUrlEncodingParameters(bytes, offset, count)) + { + return null; + } + + int cSpaces = 0; + int cUnsafe = 0; + + // count them first + for (int i = 0; i < count; i++) + { + char ch = (char)bytes[offset + i]; + + if (ch == ' ') + cSpaces++; + else if (!IsUrlSafeChar(ch)) + cUnsafe++; + } + + // nothing to expand? 
+ if (cSpaces == 0 && cUnsafe == 0) + return bytes; + + // expand not 'safe' characters into %XX, spaces to +s + byte[] expandedBytes = new byte[count + cUnsafe * 2]; + int pos = 0; + + for (int i = 0; i < count; i++) + { + byte b = bytes[offset + i]; + char ch = (char)b; + + if (IsUrlSafeChar(ch)) + { + expandedBytes[pos++] = b; + } + else if (ch == ' ') + { + expandedBytes[pos++] = (byte)'+'; + } + else + { + expandedBytes[pos++] = (byte)'%'; + expandedBytes[pos++] = (byte)IntToHex((b >> 4) & 0xf); + expandedBytes[pos++] = (byte)IntToHex(b & 0x0f); + } + } + + return expandedBytes; + } + + // Helper to encode the non-ASCII url characters only + private static String UrlEncodeNonAscii(string str, Encoding e) + { + if (String.IsNullOrEmpty(str)) + return str; + if (e == null) + e = Encoding.UTF8; + byte[] bytes = e.GetBytes(str); + byte[] encodedBytes = UrlEncodeNonAscii(bytes, 0, bytes.Length, false /* alwaysCreateNewReturnValue */); + return Encoding.ASCII.GetString(encodedBytes); + } + + private static byte[] UrlEncodeNonAscii(byte[] bytes, int offset, int count, bool alwaysCreateNewReturnValue) + { + if (!ValidateUrlEncodingParameters(bytes, offset, count)) + { + return null; + } + + int cNonAscii = 0; + + // count them first + for (int i = 0; i < count; i++) + { + if (IsNonAsciiByte(bytes[offset + i])) + cNonAscii++; + } + + // nothing to expand? + if (!alwaysCreateNewReturnValue && cNonAscii == 0) + return bytes; + + // expand not 'safe' characters into %XX, spaces to +s + byte[] expandedBytes = new byte[count + cNonAscii * 2]; + int pos = 0; + + for (int i = 0; i < count; i++) + { + byte b = bytes[offset + i]; + + if (IsNonAsciiByte(b)) + { + expandedBytes[pos++] = (byte)'%'; + expandedBytes[pos++] = (byte)IntToHex((b >> 4) & 0xf); + expandedBytes[pos++] = (byte)IntToHex(b & 0x0f); + } + else + { + expandedBytes[pos++] = b; + } + } + + return expandedBytes; + } + +#endregion + +#region UrlEncode public methods + + // *** Source: ndp/fx/src/xsp/system/web/httpserverutility.cs + + public static string UrlEncode(string str) + { + if (str == null) + return null; + + return UrlEncode(str, Encoding.UTF8); + } + + public static string Base64Encode(string str) + { + if (str == null) + return null; + + return Convert.ToBase64String(Encoding.UTF8.GetBytes(str)); + } + + public static string UrlEncode(string str, Encoding e) + { + if (str == null) + return null; + + return Encoding.ASCII.GetString(UrlEncodeToBytes(str, e)); + } + public static string UrlEncode(byte[] bytes) + { + if (bytes == null) + return null; + + return Encoding.ASCII.GetString(UrlEncodeToBytes(bytes)); + } + + public static byte[] UrlEncodeToBytes(string str) + { + if (str == null) + return null; + + return UrlEncodeToBytes(str, Encoding.UTF8); + } + + public static byte[] UrlEncodeToBytes(byte[] bytes) + { + if (bytes == null) + return null; + + return UrlEncodeToBytes(bytes, 0, bytes.Length); + } + + public static byte[] UrlEncodeToBytes(string str, Encoding e) + { + if (str == null) + return null; + + byte[] bytes = e.GetBytes(str); + return UrlEncode(bytes, 0, bytes.Length, false /* alwaysCreateNewReturnValue */); + } + + public static byte[] UrlEncodeToBytes(byte[] bytes, int offset, int count) + { + return UrlEncode(bytes, offset, count, true /* alwaysCreateNewReturnValue */); + } + + // *** Source: ndp/fx/src/xsp/system/web/util/httpencoder.cs + + public static string UrlPathEncode(string str) + { + if (String.IsNullOrEmpty(str)) + { + return str; + } + + // recurse in case there is a query string + int i = 
str.IndexOf('?'); + if (i >= 0) + return UrlPathEncode(str.Substring(0, i)) + str.Substring(i); + + // encode DBCS characters and spaces only + return UrlEncodeSpaces(UrlEncodeNonAscii(str, Encoding.UTF8)); + } + +#endregion + +#region UrlEncodeUnicode + + // *** Source: ndp/fx/src/xsp/system/web/util/httpencoder.cs + + public static string UrlEncodeUnicode(string value) + { + if (value == null) + { + return null; + } + + int l = value.Length; + StringBuilder sb = new StringBuilder(l); + + for (int i = 0; i < l; i++) + { + char ch = value[i]; + + if ((ch & 0xff80) == 0) + { // 7 bit? + if (IsUrlSafeChar(ch)) + { + sb.Append(ch); + } + else if (ch == ' ') + { + sb.Append('+'); + } + else + { + sb.Append('%'); + sb.Append(IntToHex((ch >> 4) & 0xf)); + sb.Append(IntToHex((ch) & 0xf)); + } + } + else + { // arbitrary Unicode? + sb.Append("%u"); + sb.Append(IntToHex((ch >> 12) & 0xf)); + sb.Append(IntToHex((ch >> 8) & 0xf)); + sb.Append(IntToHex((ch >> 4) & 0xf)); + sb.Append(IntToHex((ch) & 0xf)); + } + } + + return sb.ToString(); + } + +#endregion + +#region HttpValueCollection nested class + + // *** Source: ndp/fx/src/xsp/system/web/httpvaluecollection.cs + + [Serializable()] + internal class HttpValueCollection : NameValueCollection + { + internal HttpValueCollection() + : base(StringComparer.OrdinalIgnoreCase) + { + } + + internal HttpValueCollection(String str, bool readOnly, bool urlencoded, Encoding encoding) + : base(StringComparer.OrdinalIgnoreCase) + { + if (!String.IsNullOrEmpty(str)) + FillFromString(str, urlencoded, encoding); + + IsReadOnly = readOnly; + } + + internal HttpValueCollection(int capacity) + : base(capacity, StringComparer.OrdinalIgnoreCase) + { + } + + protected HttpValueCollection(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + internal void MakeReadOnly() + { + IsReadOnly = true; + } + + internal void MakeReadWrite() + { + IsReadOnly = false; + } + + internal void FillFromString(String s) + { + FillFromString(s, false, null); + } + + internal void FillFromString(String s, bool urlencoded, Encoding encoding) + { + int l = (s != null) ? s.Length : 0; + int i = 0; + + while (i < l) + { + // find next & while noting first = on the way (and if there are more) + + int si = i; + int ti = -1; + + while (i < l) + { + char ch = s[i]; + + if (ch == '=') + { + if (ti < 0) + ti = i; + } + else if (ch == '&') + { + break; + } + + i++; + } + + // extract the name / value pair + + String name = null; + String value = null; + + if (ti >= 0) + { + name = s.Substring(si, ti - si); + value = s.Substring(ti + 1, i - ti - 1); + } + else + { + value = s.Substring(si, i - si); + } + + // add name / value pair to the collection + + if (urlencoded) + base.Add( + UriUtility.UrlDecode(name, encoding), + UriUtility.UrlDecode(value, encoding)); + else + base.Add(name, value); + + // trailing '&' + + if (i == l - 1 && s[i] == '&') + base.Add(null, String.Empty); + + i++; + } + } + + internal void FillFromEncodedBytes(byte[] bytes, Encoding encoding) + { + int l = (bytes != null) ? 
bytes.Length : 0;
+                int i = 0;
+
+                while (i < l)
+                {
+                    // find next & while noting first = on the way (and if there are more)
+
+                    int si = i;
+                    int ti = -1;
+
+                    while (i < l)
+                    {
+                        byte b = bytes[i];
+
+                        if (b == '=')
+                        {
+                            if (ti < 0)
+                                ti = i;
+                        }
+                        else if (b == '&')
+                        {
+                            break;
+                        }
+
+                        i++;
+                    }
+
+                    // extract the name / value pair
+
+                    String name, value;
+
+                    if (ti >= 0)
+                    {
+                        name = UriUtility.UrlDecode(bytes, si, ti - si, encoding);
+                        value = UriUtility.UrlDecode(bytes, ti + 1, i - ti - 1, encoding);
+                    }
+                    else
+                    {
+                        name = null;
+                        value = UriUtility.UrlDecode(bytes, si, i - si, encoding);
+                    }
+
+                    // add name / value pair to the collection
+
+                    base.Add(name, value);
+
+                    // trailing '&'
+
+                    if (i == l - 1 && bytes[i] == '&')
+                        base.Add(null, String.Empty);
+
+                    i++;
+                }
+            }
+
+            internal void Reset()
+            {
+                base.Clear();
+            }
+
+            public override String ToString()
+            {
+                return ToString(true);
+            }
+
+            internal virtual String ToString(bool urlencoded)
+            {
+                return ToString(urlencoded, null);
+            }
+
+            internal virtual String ToString(bool urlencoded, IDictionary excludeKeys)
+            {
+                int n = Count;
+                if (n == 0)
+                    return String.Empty;
+
+                StringBuilder s = new StringBuilder();
+                String key, keyPrefix, item;
+
+                for (int i = 0; i < n; i++)
+                {
+                    key = GetKey(i);
+
+                    if (excludeKeys != null && key != null && excludeKeys[key] != null)
+                        continue;
+                    if (urlencoded)
+                        key = UriUtility.UrlEncodeUnicode(key);
+                    keyPrefix = (key != null) ? (key + "=") : String.Empty;
+
+                    ArrayList values = (ArrayList)BaseGet(i);
+                    int numValues = (values != null) ? values.Count : 0;
+
+                    if (s.Length > 0)
+                        s.Append('&');
+
+                    if (numValues == 1)
+                    {
+                        s.Append(keyPrefix);
+                        item = (String)values[0];
+                        if (urlencoded)
+                            item = UriUtility.UrlEncodeUnicode(item);
+                        s.Append(item);
+                    }
+                    else if (numValues == 0)
+                    {
+                        s.Append(keyPrefix);
+                    }
+                    else
+                    {
+                        for (int j = 0; j < numValues; j++)
+                        {
+                            if (j > 0)
+                                s.Append('&');
+                            s.Append(keyPrefix);
+                            item = (String)values[j];
+                            if (urlencoded)
+                                item = UriUtility.UrlEncodeUnicode(item);
+                            s.Append(item);
+                        }
+                    }
+                }
+
+                return s.ToString();
+            }
+        }
+
+#endregion
+
+#region HtmlEncode
+
+        // *** Source: ndp/fx/src/net/system/net/webutility.cs
+
+        public static string HtmlEncode(string value)
+        {
+            if (String.IsNullOrEmpty(value))
+            {
+                return value;
+            }
+
+            // Don't create string writer if we don't have nothing to encode
+            int index = IndexOfHtmlEncodingChars(value, 0);
+            if (index == -1)
+            {
+                return value;
+            }
+
+            using (StringWriter writer = new StringWriter(CultureInfo.InvariantCulture))
+            {
+                HtmlEncode(value, writer);
+                return writer.ToString();
+            }
+        }
+
+        public static unsafe void HtmlEncode(string value, TextWriter output)
+        {
+            if (value == null)
+            {
+                return;
+            }
+            if (output == null)
+            {
+                throw new ArgumentNullException("output");
+            }
+
+            int index = IndexOfHtmlEncodingChars(value, 0);
+            if (index == -1)
+            {
+                output.Write(value);
+                return;
+            }
+
+            Debug.Assert(0 <= index && index <= value.Length, "0 <= index && index <= value.Length");
+
+            int cch = value.Length - index;
+            fixed (char* str = value)
+            {
+                char* pch = str;
+                while (index-- > 0)
+                {
+                    output.Write(*pch++);
+                }
+
+                while (cch-- > 0)
+                {
+                    char ch = *pch++;
+                    if (ch <= '>')
+                    {
+                        switch (ch)
+                        {
+                            case '<':
+                                output.Write("&lt;");
+                                break;
+                            case '>':
+                                output.Write("&gt;");
+                                break;
+                            case '"':
+                                output.Write("&quot;");
+                                break;
+                            case '\'':
+                                output.Write("&#39;");
+                                break;
+                            case '&':
+                                output.Write("&amp;");
+                                break;
+                            default:
+                                output.Write(ch);
+                                break;
+                        }
+                    }
+#if ENTITY_ENCODE_HIGH_ASCII_CHARS
+                    else if (ch >=
160 && ch < 256) + { + // The seemingly arbitrary 160 comes from RFC + output.Write("&#"); + output.Write(((int)ch).ToString(NumberFormatInfo.InvariantInfo)); + output.Write(';'); + } +#endif // ENTITY_ENCODE_HIGH_ASCII_CHARS + else + { + output.Write(ch); + } + } + } + } + + #endregion + +#region HtmlEncode/Decode helper methods + + // *** Source: ndp/fx/src/net/system/net/webutility.cs + + private static unsafe int IndexOfHtmlEncodingChars(string s, int startPos) + { + Debug.Assert(0 <= startPos && startPos <= s.Length, "0 <= startPos && startPos <= s.Length"); + + int cch = s.Length - startPos; + fixed (char* str = s) + { + for (char* pch = &str[startPos]; cch > 0; pch++, cch--) + { + char ch = *pch; + if (ch <= '>') + { + switch (ch) + { + case '<': + case '>': + case '"': + case '\'': + case '&': + return s.Length - cch; + } + } +#if ENTITY_ENCODE_HIGH_ASCII_CHARS + else if (ch >= 160 && ch < 256) + { + return s.Length - cch; + } +#endif // ENTITY_ENCODE_HIGH_ASCII_CHARS + } + } + + return -1; + } + +#endregion + +#region UrlDecode implementation + + // *** Source: ndp/fx/src/xsp/system/web/util/httpencoder.cs + + private static string UrlDecodeInternal(string value, Encoding encoding) + { + if (value == null) + { + return null; + } + + int count = value.Length; + UrlDecoder helper = new UrlDecoder(count, encoding); + + // go through the string's chars collapsing %XX and %uXXXX and + // appending each char as char, with exception of %XX constructs + // that are appended as bytes + + for (int pos = 0; pos < count; pos++) + { + char ch = value[pos]; + + if (ch == '+') + { + ch = ' '; + } + else if (ch == '%' && pos < count - 2) + { + if (value[pos + 1] == 'u' && pos < count - 5) + { + int h1 = HexToInt(value[pos + 2]); + int h2 = HexToInt(value[pos + 3]); + int h3 = HexToInt(value[pos + 4]); + int h4 = HexToInt(value[pos + 5]); + + if (h1 >= 0 && h2 >= 0 && h3 >= 0 && h4 >= 0) + { // valid 4 hex chars + ch = (char)((h1 << 12) | (h2 << 8) | (h3 << 4) | h4); + pos += 5; + + // only add as char + helper.AddChar(ch); + continue; + } + } + else + { + int h1 = HexToInt(value[pos + 1]); + int h2 = HexToInt(value[pos + 2]); + + if (h1 >= 0 && h2 >= 0) + { // valid 2 hex chars + byte b = (byte)((h1 << 4) | h2); + pos += 2; + + // don't add as char + helper.AddByte(b); + continue; + } + } + } + + if ((ch & 0xFF80) == 0) + helper.AddByte((byte)ch); // 7 bit have to go as bytes because of Unicode + else + helper.AddChar(ch); + } + + return helper.GetString(); + } + + private static byte[] UrlDecodeInternal(byte[] bytes, int offset, int count) + { + if (!ValidateUrlEncodingParameters(bytes, offset, count)) + { + return null; + } + + int decodedBytesCount = 0; + byte[] decodedBytes = new byte[count]; + + for (int i = 0; i < count; i++) + { + int pos = offset + i; + byte b = bytes[pos]; + + if (b == '+') + { + b = (byte)' '; + } + else if (b == '%' && i < count - 2) + { + int h1 = HexToInt((char)bytes[pos + 1]); + int h2 = HexToInt((char)bytes[pos + 2]); + + if (h1 >= 0 && h2 >= 0) + { // valid 2 hex chars + b = (byte)((h1 << 4) | h2); + i += 2; + } + } + + decodedBytes[decodedBytesCount++] = b; + } + + if (decodedBytesCount < decodedBytes.Length) + { + byte[] newDecodedBytes = new byte[decodedBytesCount]; + Array.Copy(decodedBytes, newDecodedBytes, decodedBytesCount); + decodedBytes = newDecodedBytes; + } + + return decodedBytes; + } + + private static string UrlDecodeInternal(byte[] bytes, int offset, int count, Encoding encoding) + { + if (!ValidateUrlEncodingParameters(bytes, offset, count)) + { + 
return null; + } + + UrlDecoder helper = new UrlDecoder(count, encoding); + + // go through the bytes collapsing %XX and %uXXXX and appending + // each byte as byte, with exception of %uXXXX constructs that + // are appended as chars + + for (int i = 0; i < count; i++) + { + int pos = offset + i; + byte b = bytes[pos]; + + // The code assumes that + and % cannot be in multibyte sequence + + if (b == '+') + { + b = (byte)' '; + } + else if (b == '%' && i < count - 2) + { + if (bytes[pos + 1] == 'u' && i < count - 5) + { + int h1 = HexToInt((char)bytes[pos + 2]); + int h2 = HexToInt((char)bytes[pos + 3]); + int h3 = HexToInt((char)bytes[pos + 4]); + int h4 = HexToInt((char)bytes[pos + 5]); + + if (h1 >= 0 && h2 >= 0 && h3 >= 0 && h4 >= 0) + { // valid 4 hex chars + char ch = (char)((h1 << 12) | (h2 << 8) | (h3 << 4) | h4); + i += 5; + + // don't add as byte + helper.AddChar(ch); + continue; + } + } + else + { + int h1 = HexToInt((char)bytes[pos + 1]); + int h2 = HexToInt((char)bytes[pos + 2]); + + if (h1 >= 0 && h2 >= 0) + { // valid 2 hex chars + b = (byte)((h1 << 4) | h2); + i += 2; + } + } + } + + helper.AddByte(b); + } + + return helper.GetString(); + } + +#endregion + +#region UrlDecode public methods + + // *** Source: ndp/fx/src/xsp/system/web/httpserverutility.cs + + public static string UrlDecode(string str) + { + if (str == null) + return null; + + return UrlDecode(str, Encoding.UTF8); + } + + public static string Base64Decode(string str) + { + if (str == null) + { + return null; + } + return Encoding.UTF8.GetString(Convert.FromBase64String(str)); + } + + public static string UrlDecode(string str, Encoding e) + { + return UrlDecodeInternal(str, e); + } + + public static string UrlDecode(byte[] bytes, Encoding e) + { + if (bytes == null) + return null; + + return UrlDecode(bytes, 0, bytes.Length, e); + } + + public static string UrlDecode(byte[] bytes, int offset, int count, Encoding e) + { + return UrlDecodeInternal(bytes, offset, count, e); + } + + public static byte[] UrlDecodeToBytes(string str) + { + if (str == null) + return null; + + return UrlDecodeToBytes(str, Encoding.UTF8); + } + + public static byte[] UrlDecodeToBytes(string str, Encoding e) + { + if (str == null) + return null; + + return UrlDecodeToBytes(e.GetBytes(str)); + } + + public static byte[] UrlDecodeToBytes(byte[] bytes) + { + if (bytes == null) + return null; + + return UrlDecodeToBytes(bytes, 0, (bytes != null) ? bytes.Length : 0); + } + + public static byte[] UrlDecodeToBytes(byte[] bytes, int offset, int count) + { + return UrlDecodeInternal(bytes, offset, count); + } + +#endregion + +#region Helper methods + + // *** Source: ndp/fx/src/xsp/system/web/util/httpencoderutility.cs + + public static int HexToInt(char h) + { + return (h >= '0' && h <= '9') ? h - '0' : + (h >= 'a' && h <= 'f') ? h - 'a' + 10 : + (h >= 'A' && h <= 'F') ? 
h - 'A' + 10 : + -1; + } + + public static char IntToHex(int n) + { + Debug.Assert(n < 0x10); + + if (n <= 9) + return (char)(n + (int)'0'); + else + return (char)(n - 10 + (int)'a'); + } + + // Set of safe chars, from RFC 1738.4 minus '+' + public static bool IsUrlSafeChar(char ch) + { + if (ch >= 'a' && ch <= 'z' || ch >= 'A' && ch <= 'Z' || ch >= '0' && ch <= '9') + return true; + + switch (ch) + { + case '-': + case '_': + case '.': + case '!': + case '*': + case '(': + case ')': + return true; + } + + return false; + } + + // Helper to encode spaces only + internal static String UrlEncodeSpaces(string str) + { + if (str != null && str.IndexOf(' ') >= 0) + str = str.Replace(" ", "%20"); + return str; + } + + // *** Source: ndp/fx/src/xsp/system/web/util/httpencoder.cs + + private static bool ValidateUrlEncodingParameters(byte[] bytes, int offset, int count) + { + if (bytes == null && count == 0) + return false; + if (bytes == null) + { + throw new ArgumentNullException("bytes"); + } + if (offset < 0 || offset > bytes.Length) + { + throw new ArgumentOutOfRangeException("offset"); + } + if (count < 0 || offset + count > bytes.Length) + { + throw new ArgumentOutOfRangeException("count"); + } + + return true; + } + + private static bool IsNonAsciiByte(byte b) + { + return (b >= 0x7F || b < 0x20); + } + +#endregion + +#region UrlDecoder nested class + + // *** Source: ndp/fx/src/xsp/system/web/util/httpencoder.cs + + // Internal class to facilitate URL decoding -- keeps char buffer and byte buffer, allows appending of either chars or bytes + private class UrlDecoder + { + private int _bufferSize; + + // Accumulate characters in a special array + private int _numChars; + private char[] _charBuffer; + + // Accumulate bytes for decoding into characters in a special array + private int _numBytes; + private byte[] _byteBuffer; + + // Encoding to convert chars to bytes + private Encoding _encoding; + + private void FlushBytes() + { + if (_numBytes > 0) + { + _numChars += _encoding.GetChars(_byteBuffer, 0, _numBytes, _charBuffer, _numChars); + _numBytes = 0; + } + } + + internal UrlDecoder(int bufferSize, Encoding encoding) + { + _bufferSize = bufferSize; + _encoding = encoding; + + _charBuffer = new char[bufferSize]; + // byte buffer created on demand + } + + internal void AddChar(char ch) + { + if (_numBytes > 0) + FlushBytes(); + + _charBuffer[_numChars++] = ch; + } + + internal void AddByte(byte b) + { + if (_byteBuffer == null) + _byteBuffer = new byte[_bufferSize]; + + _byteBuffer[_numBytes++] = b; + } + + internal String GetString() + { + if (_numBytes > 0) + FlushBytes(); + + if (_numChars > 0) + return new String(_charBuffer, 0, _numChars); + else + return String.Empty; + } + } + +#endregion + +#region HtmlDecode + + // *** Source: ndp/fx/src/net/system/net/webutility.cs + + public static string HtmlDecode(string value) + { + if (String.IsNullOrEmpty(value)) + { + return value; + } + + // Don't create string writer if we don't have nothing to encode + if (value.IndexOf('&') < 0) + { + return value; + } + + using (StringWriter writer = new StringWriter(CultureInfo.InvariantCulture)) + { + HtmlDecode(value, writer); + return writer.ToString(); + } + } + + [SuppressMessage("Microsoft.Usage", "CA1806:DoNotIgnoreMethodResults", MessageId = "System.UInt16.TryParse(System.String,System.Globalization.NumberStyles,System.IFormatProvider,System.UInt16@)", Justification = "UInt16.TryParse guarantees that result is zero if the parse fails.")] + public static void HtmlDecode(string value, TextWriter 
output) + { + if (value == null) + { + return; + } + if (output == null) + { + throw new ArgumentNullException("output"); + } + + if (value.IndexOf('&') < 0) + { + output.Write(value); // good as is + return; + } + + int l = value.Length; + for (int i = 0; i < l; i++) + { + char ch = value[i]; + + if (ch == '&') + { + // We found a '&'. Now look for the next ';' or '&'. The idea is that + // if we find another '&' before finding a ';', then this is not an entity, + // and the next '&' might start a real entity (VSWhidbey 275184) + int index = value.IndexOfAny(_htmlEntityEndingChars, i + 1); + if (index > 0 && value[index] == ';') + { + string entity = value.Substring(i + 1, index - i - 1); + + if (entity.Length > 1 && entity[0] == '#') + { + // The # syntax can be in decimal or hex, e.g. + // å --> decimal + // å --> same char in hex + // See http://www.w3.org/TR/REC-html40/charset.html#entities + + ushort parsed; + if (entity[1] == 'x' || entity[1] == 'X') + { + UInt16.TryParse(entity.Substring(2), NumberStyles.AllowHexSpecifier, NumberFormatInfo.InvariantInfo, out parsed); + } + else + { + UInt16.TryParse(entity.Substring(1), NumberStyles.Integer, NumberFormatInfo.InvariantInfo, out parsed); + } + + if (parsed != 0) + { + ch = (char)parsed; + i = index; // already looked at everything until semicolon + } + } + else + { + i = index; // already looked at everything until semicolon + + char entityChar = HtmlEntities.Lookup(entity); + if (entityChar != (char)0) + { + ch = entityChar; + } + else + { + output.Write('&'); + output.Write(entity); + output.Write(';'); + continue; + } + } + + } + } + + output.Write(ch); + } + } + +#endregion + +#region HtmlEntities nested class + + // *** Source: ndp/fx/src/net/system/net/webutility.cs + + // helper class for lookup of HTML encoding entities + private static class HtmlEntities + { + // The list is from http://www.w3.org/TR/REC-html40/sgml/entities.html, except for ', which + // is defined in http://www.w3.org/TR/2008/REC-xml-20081126/#sec-predefined-ent. 
+ + private static String[] _entitiesList = new String[] { + "\x0022-quot", + "\x0026-amp", + "\x0027-apos", + "\x003c-lt", + "\x003e-gt", + "\x00a0-nbsp", + "\x00a1-iexcl", + "\x00a2-cent", + "\x00a3-pound", + "\x00a4-curren", + "\x00a5-yen", + "\x00a6-brvbar", + "\x00a7-sect", + "\x00a8-uml", + "\x00a9-copy", + "\x00aa-ordf", + "\x00ab-laquo", + "\x00ac-not", + "\x00ad-shy", + "\x00ae-reg", + "\x00af-macr", + "\x00b0-deg", + "\x00b1-plusmn", + "\x00b2-sup2", + "\x00b3-sup3", + "\x00b4-acute", + "\x00b5-micro", + "\x00b6-para", + "\x00b7-middot", + "\x00b8-cedil", + "\x00b9-sup1", + "\x00ba-ordm", + "\x00bb-raquo", + "\x00bc-frac14", + "\x00bd-frac12", + "\x00be-frac34", + "\x00bf-iquest", + "\x00c0-Agrave", + "\x00c1-Aacute", + "\x00c2-Acirc", + "\x00c3-Atilde", + "\x00c4-Auml", + "\x00c5-Aring", + "\x00c6-AElig", + "\x00c7-Ccedil", + "\x00c8-Egrave", + "\x00c9-Eacute", + "\x00ca-Ecirc", + "\x00cb-Euml", + "\x00cc-Igrave", + "\x00cd-Iacute", + "\x00ce-Icirc", + "\x00cf-Iuml", + "\x00d0-ETH", + "\x00d1-Ntilde", + "\x00d2-Ograve", + "\x00d3-Oacute", + "\x00d4-Ocirc", + "\x00d5-Otilde", + "\x00d6-Ouml", + "\x00d7-times", + "\x00d8-Oslash", + "\x00d9-Ugrave", + "\x00da-Uacute", + "\x00db-Ucirc", + "\x00dc-Uuml", + "\x00dd-Yacute", + "\x00de-THORN", + "\x00df-szlig", + "\x00e0-agrave", + "\x00e1-aacute", + "\x00e2-acirc", + "\x00e3-atilde", + "\x00e4-auml", + "\x00e5-aring", + "\x00e6-aelig", + "\x00e7-ccedil", + "\x00e8-egrave", + "\x00e9-eacute", + "\x00ea-ecirc", + "\x00eb-euml", + "\x00ec-igrave", + "\x00ed-iacute", + "\x00ee-icirc", + "\x00ef-iuml", + "\x00f0-eth", + "\x00f1-ntilde", + "\x00f2-ograve", + "\x00f3-oacute", + "\x00f4-ocirc", + "\x00f5-otilde", + "\x00f6-ouml", + "\x00f7-divide", + "\x00f8-oslash", + "\x00f9-ugrave", + "\x00fa-uacute", + "\x00fb-ucirc", + "\x00fc-uuml", + "\x00fd-yacute", + "\x00fe-thorn", + "\x00ff-yuml", + "\x0152-OElig", + "\x0153-oelig", + "\x0160-Scaron", + "\x0161-scaron", + "\x0178-Yuml", + "\x0192-fnof", + "\x02c6-circ", + "\x02dc-tilde", + "\x0391-Alpha", + "\x0392-Beta", + "\x0393-Gamma", + "\x0394-Delta", + "\x0395-Epsilon", + "\x0396-Zeta", + "\x0397-Eta", + "\x0398-Theta", + "\x0399-Iota", + "\x039a-Kappa", + "\x039b-Lambda", + "\x039c-Mu", + "\x039d-Nu", + "\x039e-Xi", + "\x039f-Omicron", + "\x03a0-Pi", + "\x03a1-Rho", + "\x03a3-Sigma", + "\x03a4-Tau", + "\x03a5-Upsilon", + "\x03a6-Phi", + "\x03a7-Chi", + "\x03a8-Psi", + "\x03a9-Omega", + "\x03b1-alpha", + "\x03b2-beta", + "\x03b3-gamma", + "\x03b4-delta", + "\x03b5-epsilon", + "\x03b6-zeta", + "\x03b7-eta", + "\x03b8-theta", + "\x03b9-iota", + "\x03ba-kappa", + "\x03bb-lambda", + "\x03bc-mu", + "\x03bd-nu", + "\x03be-xi", + "\x03bf-omicron", + "\x03c0-pi", + "\x03c1-rho", + "\x03c2-sigmaf", + "\x03c3-sigma", + "\x03c4-tau", + "\x03c5-upsilon", + "\x03c6-phi", + "\x03c7-chi", + "\x03c8-psi", + "\x03c9-omega", + "\x03d1-thetasym", + "\x03d2-upsih", + "\x03d6-piv", + "\x2002-ensp", + "\x2003-emsp", + "\x2009-thinsp", + "\x200c-zwnj", + "\x200d-zwj", + "\x200e-lrm", + "\x200f-rlm", + "\x2013-ndash", + "\x2014-mdash", + "\x2018-lsquo", + "\x2019-rsquo", + "\x201a-sbquo", + "\x201c-ldquo", + "\x201d-rdquo", + "\x201e-bdquo", + "\x2020-dagger", + "\x2021-Dagger", + "\x2022-bull", + "\x2026-hellip", + "\x2030-permil", + "\x2032-prime", + "\x2033-Prime", + "\x2039-lsaquo", + "\x203a-rsaquo", + "\x203e-oline", + "\x2044-frasl", + "\x20ac-euro", + "\x2111-image", + "\x2118-weierp", + "\x211c-real", + "\x2122-trade", + "\x2135-alefsym", + "\x2190-larr", + "\x2191-uarr", + "\x2192-rarr", + "\x2193-darr", + 
"\x2194-harr", + "\x21b5-crarr", + "\x21d0-lArr", + "\x21d1-uArr", + "\x21d2-rArr", + "\x21d3-dArr", + "\x21d4-hArr", + "\x2200-forall", + "\x2202-part", + "\x2203-exist", + "\x2205-empty", + "\x2207-nabla", + "\x2208-isin", + "\x2209-notin", + "\x220b-ni", + "\x220f-prod", + "\x2211-sum", + "\x2212-minus", + "\x2217-lowast", + "\x221a-radic", + "\x221d-prop", + "\x221e-infin", + "\x2220-ang", + "\x2227-and", + "\x2228-or", + "\x2229-cap", + "\x222a-cup", + "\x222b-int", + "\x2234-there4", + "\x223c-sim", + "\x2245-cong", + "\x2248-asymp", + "\x2260-ne", + "\x2261-equiv", + "\x2264-le", + "\x2265-ge", + "\x2282-sub", + "\x2283-sup", + "\x2284-nsub", + "\x2286-sube", + "\x2287-supe", + "\x2295-oplus", + "\x2297-otimes", + "\x22a5-perp", + "\x22c5-sdot", + "\x2308-lceil", + "\x2309-rceil", + "\x230a-lfloor", + "\x230b-rfloor", + "\x2329-lang", + "\x232a-rang", + "\x25ca-loz", + "\x2660-spades", + "\x2663-clubs", + "\x2665-hearts", + "\x2666-diams", + }; + + private static Dictionary _lookupTable = GenerateLookupTable(); + + private static Dictionary GenerateLookupTable() + { + // e[0] is unicode char, e[1] is '-', e[2+] is entity string + + Dictionary lookupTable = new Dictionary(StringComparer.Ordinal); + foreach (string e in _entitiesList) + { + lookupTable.Add(e.Substring(2), e[0]); + } + + return lookupTable; + } + + public static char Lookup(string entity) + { + char theChar; + _lookupTable.TryGetValue(entity, out theChar); + return theChar; + } + } + +#endregion + + // *** Source: ndp/fx/src/net/system/net/webutility.cs + private static char[] _htmlEntityEndingChars = new char[] { ';', '&' }; +#endregion + } +} diff --git a/src/Sdk/Common/Common/Utility/VssStringComparer.cs b/src/Sdk/Common/Common/Utility/VssStringComparer.cs new file mode 100644 index 00000000000..c156da962e5 --- /dev/null +++ b/src/Sdk/Common/Common/Utility/VssStringComparer.cs @@ -0,0 +1,286 @@ +// ************************************************************************************************ +// Microsoft Team Foundation +// +// Microsoft Confidential +// Copyright (c) Microsoft Corporation. All rights reserved. +// +// File: VssStringComparer.cs +// Area: Team Foundation +// Classes: VssStringComparer +// Contents: The Team Foundation string comparison class provides inner classes +// that are used to provide semantic-specific Equals and Compare methods +// and a semantic-specific StringComparer instance. New semantics should +// be added on an as-needed basis. +// ************************************************************************************************ +using System; +using System.Diagnostics; + +namespace GitHub.Services.Common +{ + + // NOTE: Since the recommendations are for Ordinal and OrdinalIgnoreCase, no need to explain those, but + // please explain any instances using non-Ordinal comparisons (CurrentCulture, InvariantCulture) + // so that developers following you can understand the choices and verify they are correct. + + // NOTE: please try to keep the semantic-named properties in alphabetical order to ease merges + + // NOTE: do NOT add xml doc comments - everything in here should be a very thin wrapper around String + // or StringComparer. The usage of the methods and properties in this class should be intuitively + // obvious, so please don't add xml doc comments to this class since it should be wholly internal + // by the time we ship. 
+ + // NOTE: Current guidelines from the CLR team (Dave Fetterman) is to stick with the same operation for both + // Compare and Equals for a given semantic inner class. This has the nice side effect that you don't + // get different behavior between calling Equals or calling Compare == 0. This may seem odd given the + // recommendations about using CurrentCulture for UI operations and Compare being used for sorting + // items for user display in many cases, but we need to have the type of string data determine the + // string comparison enum to use instead of the consumer of the comparison operation so that we're + // consistent in how we treat a given semantic. + + // VssStringComparer should act like StringComparer with a few additional methods for usefulness (Contains, + // StartsWith, EndsWith, etc.) so that it can be a "one-stop shop" for string comparisons. + public class VssStringComparer : StringComparer + { + private StringComparison m_stringComparison; + private StringComparer m_stringComparer; + + protected VssStringComparer(StringComparison stringComparison) + : base() + { + m_stringComparison = stringComparison; + } + + // pass-through implementations based on our current StringComparison setting + public override int Compare(string x, string y) { return String.Compare(x, y, m_stringComparison); } + public override bool Equals(string x, string y) { return String.Equals(x, y, m_stringComparison); } + public override int GetHashCode(string x) { return MatchingStringComparer.GetHashCode(x); } + + [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "y")] + [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "x")] + public int Compare(string x, int indexX, string y, int indexY, int length) { return String.Compare(x, indexX, y, indexY, length, m_stringComparison); } + + // add new useful methods here + public bool Contains(string main, string pattern) + { + ArgumentUtility.CheckForNull(main, "main"); + ArgumentUtility.CheckForNull(pattern, "pattern"); + + return main.IndexOf(pattern, m_stringComparison) >= 0; + } + + public int IndexOf(string main, string pattern) + { + ArgumentUtility.CheckForNull(main, "main"); + ArgumentUtility.CheckForNull(pattern, "pattern"); + + return main.IndexOf(pattern, m_stringComparison); + } + + public bool StartsWith(string main, string pattern) + { + ArgumentUtility.CheckForNull(main, "main"); + ArgumentUtility.CheckForNull(pattern, "pattern"); + + return main.StartsWith(pattern, m_stringComparison); + } + + public bool EndsWith(string main, string pattern) + { + ArgumentUtility.CheckForNull(main, "main"); + ArgumentUtility.CheckForNull(pattern, "pattern"); + + return main.EndsWith(pattern, m_stringComparison); + } + + private StringComparer MatchingStringComparer + { + get + { + if (m_stringComparer == null) + { + switch (m_stringComparison) + { + case StringComparison.CurrentCulture: + m_stringComparer = StringComparer.CurrentCulture; + break; + + case StringComparison.CurrentCultureIgnoreCase: + m_stringComparer = StringComparer.CurrentCultureIgnoreCase; + break; + + case StringComparison.Ordinal: + m_stringComparer = StringComparer.Ordinal; + break; + + case StringComparison.OrdinalIgnoreCase: + m_stringComparer = StringComparer.OrdinalIgnoreCase; + break; + + default: + Debug.Assert(false, "Unknown StringComparison value"); + m_stringComparer = StringComparer.Ordinal; + break; + } + } + return 
m_stringComparer; + } + } + + protected static VssStringComparer s_ordinal = new VssStringComparer(StringComparison.Ordinal); + protected static VssStringComparer s_ordinalIgnoreCase = new VssStringComparer(StringComparison.OrdinalIgnoreCase); + protected static VssStringComparer s_currentCulture = new VssStringComparer(StringComparison.CurrentCulture); + protected static VssStringComparer s_currentCultureIgnoreCase = new VssStringComparer(StringComparison.CurrentCultureIgnoreCase); + private static VssStringComparer s_dataSourceIgnoreProtocol = new DataSourceIgnoreProtocolComparer(); + + + public static VssStringComparer ActiveDirectoryEntityIdComparer { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ArtifactType { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ArtifactTool { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer AssemblyName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ContentType { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer DomainName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer DomainNameUI { get { return s_currentCultureIgnoreCase; } } + public static VssStringComparer DatabaseCategory { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer DatabaseName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer DataSource { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer DataSourceIgnoreProtocol { get { return s_dataSourceIgnoreProtocol; } } + public static VssStringComparer DirectoryName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer DirectoryEntityIdentifierConstants { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer DirectoryEntityPropertyComparer { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer DirectoryEntityTypeComparer { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer DirectoryEntryNameComparer { get { return s_currentCultureIgnoreCase; } } + public static VssStringComparer DirectoryKeyStringComparer { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer EncodingName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer EnvVar { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ExceptionSource { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer FilePath { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer FilePathUI { get { return s_currentCultureIgnoreCase; } } + public static VssStringComparer Guid { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer Hostname { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer HostnameUI { get { return s_currentCultureIgnoreCase; } } + public static VssStringComparer HttpRequestMethod { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer IdentityDescriptor { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer IdentityDomain { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer IdentityOriginId { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer IdentityType { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer LinkName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer MachineName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer 
MailAddress { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer PropertyName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer RegistrationAttributeName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ReservedGroupName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer WMDSchemaClassName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer SamAccountName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer AccountName { get { return s_currentCultureIgnoreCase; } } + public static VssStringComparer SocialType { get { return s_currentCultureIgnoreCase; } } + public static VssStringComparer ServerUrl { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ServerUrlUI { get { return s_currentCultureIgnoreCase; } } + public static VssStringComparer ServiceInterface { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ServicingOperation { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ToolId { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer Url { get { return s_ordinal; } } + public static VssStringComparer UrlPath { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer UriScheme { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer UriAuthority { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer UserId { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer UserName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer UserNameUI { get { return s_currentCultureIgnoreCase; } } + public static VssStringComparer XmlAttributeName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer XmlNodeName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer XmlElement { get { return s_ordinal; } } + public static VssStringComparer XmlAttributeValue { get { return s_ordinalIgnoreCase; } } + + //Framework comparers. + public static VssStringComparer RegistryPath { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ServiceType { get { return s_currentCultureIgnoreCase; } } + public static VssStringComparer AccessMappingMoniker { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer CatalogNodePath { get { return s_ordinal; } } + public static VssStringComparer CatalogServiceReference { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer CatalogNodeDependency { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ServicingTokenName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer IdentityPropertyName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer Collation { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer FeatureAvailabilityName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer TagName { get { return s_currentCultureIgnoreCase; } } + + //Framework Hosting comparers. 
+ public static VssStringComparer HostingAccountPropertyName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer MessageBusName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer MessageBusSubscriptionName { get { return s_ordinalIgnoreCase; } } + + public static VssStringComparer SID { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer FieldName { get { return s_ordinal; } } + public static VssStringComparer FieldNameUI { get { return s_currentCultureIgnoreCase; } } + public static VssStringComparer FieldType { get { return s_ordinal; } } + public static VssStringComparer EventType { get { return s_ordinal; } } + public static VssStringComparer EventTypeIgnoreCase { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer RegistrationEntryName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ServerName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer GroupName { get { return s_currentCultureIgnoreCase; } } + public static VssStringComparer RegistrationUtilities { get { return s_ordinal; } } + public static VssStringComparer RegistrationUtilitiesCaseInsensitive { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer IdentityName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer IdentityNameOrdinal { get { return s_ordinal; } } + public static VssStringComparer PlugInId { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ExtensionName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer ExtensionType { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer DomainUrl { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer AccountInfoAccount { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer AccountInfoPassword { get { return s_ordinal; } } + public static VssStringComparer AttributesDescriptor { get { return s_ordinalIgnoreCase; } } + + // Converters comparer + public static VssStringComparer VSSServerPath { get { return s_ordinalIgnoreCase; } } + + // Item rename in VSS is case sensitive. 
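+        // VSSItemName therefore maps to s_ordinal, the case-sensitive comparer, unlike most comparers in this class.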
+ public static VssStringComparer VSSItemName { get { return s_ordinal; } } + // Web Access Comparers + public static VssStringComparer HtmlElementName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer HtmlAttributeName { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer HtmlAttributeValue { get { return s_ordinalIgnoreCase; } } + + public static VssStringComparer StringFieldConditionEquality { get { return s_ordinalIgnoreCase; } } + public static VssStringComparer StringFieldConditionOrdinal { get { return s_ordinal; } } + + // Service Endpoint Comparer + public static VssStringComparer ServiceEndpointTypeCompararer { get { return s_ordinalIgnoreCase; } } + + private class DataSourceIgnoreProtocolComparer : VssStringComparer + { + public DataSourceIgnoreProtocolComparer() + : base(StringComparison.OrdinalIgnoreCase) + { + } + + public override int Compare(string x, string y) + { + return base.Compare(RemoveProtocolPrefix(x), RemoveProtocolPrefix(y)); + } + + public override bool Equals(string x, string y) + { + return base.Equals(RemoveProtocolPrefix(x), RemoveProtocolPrefix(y)); + } + + private static string RemoveProtocolPrefix(string x) + { + if (x != null) + { + if (x.StartsWith(c_tcpPrefix, StringComparison.OrdinalIgnoreCase)) + { + x = x.Substring(c_tcpPrefix.Length); + } + else if (x.StartsWith(c_npPrefix, StringComparison.OrdinalIgnoreCase)) + { + x = x.Substring(c_npPrefix.Length); + } + } + + return x; + } + + private const string c_tcpPrefix = "tcp:"; + private const string c_npPrefix = "np:"; + } + } +} diff --git a/src/Sdk/Common/Common/Utility/XmlUtility.cs b/src/Sdk/Common/Common/Utility/XmlUtility.cs new file mode 100644 index 00000000000..0c66b3a92ee --- /dev/null +++ b/src/Sdk/Common/Common/Utility/XmlUtility.cs @@ -0,0 +1,1489 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using System.Xml; +using System.Xml.Linq; + +namespace GitHub.Services.Common.Internal +{ + + [EditorBrowsable(EditorBrowsableState.Never)] +#if !NETSTANDARD + [CLSCompliant(false)] +#endif + public static class XmlUtility + { + internal static FileStream OpenFile(String path, FileShare sharing, Boolean saveFile) + { + XmlDocument noXmlDocument; + return OpenFileHelper(path, sharing, saveFile, false, out noXmlDocument); + } + + internal static XmlDocument OpenXmlFile(out FileStream file, String path, FileShare sharing, Boolean saveFile) + { + XmlDocument xmlDocument; + file = OpenFileHelper(path, sharing, saveFile, true, out xmlDocument); + + return xmlDocument; + } + + private static FileStream OpenFileHelper(String path, FileShare sharing, Boolean saveFile, Boolean loadAsXmlDocument, out XmlDocument xmlDocument) + { + const int RetryCount = 10; + FileStream file = null; + xmlDocument = null; + + if (String.IsNullOrEmpty(path)) + { + return null; + } + + // If the file doesn't exist or an exception is thrown while trying to check that, we don't have + // a cache file. + if (!saveFile && !File.Exists(path)) + { + return null; + } + + int retries = 0; + Random random = null; + while (retries <= RetryCount) + { + try + { + // Make sure the user hasn't made the file read-only if we are writing the file. 
+ FileAccess fileAccess = FileAccess.Read; + FileMode fileMode = FileMode.Open; + if (saveFile) + { + fileAccess = FileAccess.ReadWrite; + fileMode = FileMode.OpenOrCreate; + } + + file = new FileStream(path, fileMode, fileAccess, sharing); + + if (loadAsXmlDocument) + { + XmlReaderSettings settings = new XmlReaderSettings() + { + DtdProcessing = DtdProcessing.Prohibit, + XmlResolver = null, + }; + + using (XmlReader xmlReader = XmlReader.Create(file, settings)) + { + xmlDocument = new XmlDocument(); + xmlDocument.Load(xmlReader); + } + } + + return file; + } + catch (Exception exception) + { + if (file != null) + { + file.Dispose(); + file = null; + } + + if (exception is OperationCanceledException) + { + // Do not swallow the CancelException. + throw; + } + else if (exception is IOException || exception is UnauthorizedAccessException || exception is XmlException) + { + // If there was no cache file on disk, optionally create one. + if (saveFile) + { + try + { + // Create the directory if it does not exist. + if (exception is DirectoryNotFoundException) + { + String dir = Path.GetDirectoryName(path); + Directory.CreateDirectory(dir); + } + + // Reset attributes (file might be read-only) + if (exception is UnauthorizedAccessException) + { + File.SetAttributes(path, FileAttributes.Normal); + } + + xmlDocument = null; + return new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.None); + } + catch (Exception newException) when (newException is IOException || newException is UnauthorizedAccessException) + { + if (retries >= RetryCount) + { + throw new AggregateException(exception, newException); + } + } + } + else + { + return null; + } + } + else if (retries >= RetryCount) + { + throw new VssServiceException(CommonResources.ErrorReadingFile(Path.GetFileName(path), exception.Message), exception); + } + } + + // Wait before trying again. + if (random == null) + { + random = new Random(); + } + + int sleepTime = random.Next(1, 150); + Task.Delay(sleepTime).Wait(); + retries++; + } + + // Should never get to here + Debug.Fail("Code should be unreachable."); + return null; + } + + internal static void AddXmlAttribute(XmlNode node, String attrName, String value) + { + if (value != null) + { + XmlAttribute attr = node.OwnerDocument.CreateAttribute(null, attrName, null); + node.Attributes.Append(attr); + attr.InnerText = value; + } + } + + /// + /// Returns a single shared instance of secured XML reader settings. + /// + /// + /// The main configuration that is set is to "Harden or Disable XML Entity Resolution", + /// which disallows resolving entities during XML parsing. + /// + /// DO NOT USE this method if you need to resolved XML DTD entities. 
+ /// + public static XmlReaderSettings SecureReaderSettings + { + get + { + if (s_safeSettings == null) + { + XmlReaderSettings settings = new XmlReaderSettings() + { + DtdProcessing = DtdProcessing.Prohibit, + XmlResolver = null, + }; + + s_safeSettings = settings; + } + + return s_safeSettings; + } + } + + public static XmlDocument GetDocument(Stream input) + { + XmlDocument doc = new XmlDocument(); + + using (XmlReader xmlReader = XmlReader.Create(input, SecureReaderSettings)) + { + doc.Load(xmlReader); + } + + return doc; + } + + public static XmlDocument GetDocument(string xml) + { + XmlDocument doc = new XmlDocument(); + using (StringReader stringReader = new StringReader(xml)) + using (XmlReader xmlReader = XmlReader.Create(stringReader, SecureReaderSettings)) + { + doc.Load(xmlReader); + } + + return doc; + } + + public static XmlDocument GetDocumentFromPath(string path) + { + XmlDocument doc = new XmlDocument(); + + using (XmlReader xmlReader = XmlReader.Create(path, SecureReaderSettings)) + { + doc.Load(xmlReader); + } + + return doc; + } + + public static DateTime ToDateTime(String s) + { + DateTime time = XmlConvert.ToDateTime(s, XmlDateTimeSerializationMode.RoundtripKind); + + // As of Dev11, WIT uses TeamFoundationClientProxy when it used to use TeamFoundationSoapProxy. + // In Orcas, the WIT server would return DateTime strings which were UTC over-the-wire, but were not specified as such. + // e.g. "01/28/2011T22:00:00.000" instead of "01/28/2011T22:00:00.000Z" + // We need to handle that case now. If the time is unspecified, we'll assume it to be Utc. + if (time.Kind == DateTimeKind.Unspecified && + time != DateTime.MinValue && + time != DateTime.MaxValue) + { + time = DateTime.SpecifyKind(time, DateTimeKind.Utc); + } + + // Convert all year one values to DateTime.MinValue, a flag value meaning the date is not set. + // We don't want the timezone set on DateTime.MinValue... + if (time.Year == 1) + { + time = DateTime.MinValue; + } + else + { + time = time.ToLocalTime(); + } + + return time; + } + + public static DateTime ToDateOnly(String s) + { + // we intentionally don't want to call ToLocalTime for converting Date only + // because we ignore both Time and TimeZone. 
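+            // (ToDateTime above, by contrast, assumes UTC for an unspecified kind and then converts to local time.)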
+ return XmlConvert.ToDateTime(s, XmlDateTimeSerializationMode.RoundtripKind); + } + + public static String ToStringDateOnly(DateTime d) + { + return d.ToString("yyyy-MM-dd", CultureInfo.InvariantCulture); + } + + public static String ToString(DateTime d) + { + Debug.Assert(d == DateTime.MinValue || d == DateTime.MaxValue || d.Kind != DateTimeKind.Unspecified, "DateTime kind is unspecified instead of Local or Utc."); + return XmlConvert.ToString(d, XmlDateTimeSerializationMode.RoundtripKind); + } + + public static void ObjectToXmlElement(XmlWriter writer, String element, Object o) + { + if (o == null) + { + writer.WriteStartElement(element); + writer.WriteAttributeString("nil", "http://www.w3.org/2001/XMLSchema-instance", "true"); + writer.WriteEndElement(); + } + else + { + String clrTypeName = o.GetType().FullName; + String soapType = null, soapValue = null, soapNamespaceUri = null; + switch (clrTypeName) + { + case "System.Boolean": + soapType = "boolean"; + soapValue = XmlConvert.ToString((Boolean)o); + soapNamespaceUri = "http://www.w3.org/2001/XMLSchema"; + break; + case "System.Byte": + soapType = "unsignedByte"; + soapValue = XmlConvert.ToString((Byte)o); + soapNamespaceUri = "http://www.w3.org/2001/XMLSchema"; + break; + case "System.Byte[]": + soapType = "base64Binary"; + byte[] array = (byte[])o; + soapValue = Convert.ToBase64String(array, 0, array.Length); + soapNamespaceUri = "http://www.w3.org/2001/XMLSchema"; + break; + case "System.Char": + soapType = "char"; + soapValue = XmlConvert.ToString((UInt16)((Char)o)); + soapNamespaceUri = "http://microsoft.com/wsdl/types/"; + break; + case "System.DateTime": + soapType = "dateTime"; + soapValue = ToString((DateTime)o); + soapNamespaceUri = "http://www.w3.org/2001/XMLSchema"; + break; + case "System.Decimal": + soapType = "decimal"; + soapValue = XmlConvert.ToString((Decimal)o); + soapNamespaceUri = "http://www.w3.org/2001/XMLSchema"; + break; + case "System.Double": + soapType = "double"; + soapValue = XmlConvert.ToString((Double)o); + soapNamespaceUri = "http://www.w3.org/2001/XMLSchema"; + break; + case "System.Guid": + soapType = "guid"; + soapValue = XmlConvert.ToString((Guid)o); + soapNamespaceUri = "http://microsoft.com/wsdl/types/"; + break; + case "System.Int16": + soapType = "short"; + soapValue = XmlConvert.ToString((Int16)o); + soapNamespaceUri = "http://www.w3.org/2001/XMLSchema"; + break; + case "System.Int32": + soapType = "int"; + soapValue = XmlConvert.ToString((Int32)o); + soapNamespaceUri = "http://www.w3.org/2001/XMLSchema"; + break; + case "System.Int64": + soapType = "long"; + soapValue = XmlConvert.ToString((Int64)o); + soapNamespaceUri = "http://www.w3.org/2001/XMLSchema"; + break; + case "System.Single": + soapType = "float"; + soapValue = XmlConvert.ToString((Single)o); + soapNamespaceUri = "http://www.w3.org/2001/XMLSchema"; + break; + case "System.String": + soapType = "string"; + soapValue = (String)o; + soapNamespaceUri = "http://www.w3.org/2001/XMLSchema"; + break; + default: + if (o.GetType().IsArray) + { + Debug.Assert(o.GetType().GetArrayRank() == 1, "ERROR: Cannot serialize multi-dimensional arrays"); + + writer.WriteStartElement(element); + writer.WriteAttributeString("type", "http://www.w3.org/2001/XMLSchema-instance", "ArrayOfAnyType"); + ArrayOfObjectToXml(writer, (Object[])o, null, "anyType", true, false, ObjectToXmlElement); + writer.WriteEndElement(); + return; + } + else + { + Debug.Fail("Unknown object type for serialization " + clrTypeName); + throw new 
ArgumentException(CommonResources.UnknownTypeForSerialization(clrTypeName)); + } + } + + writer.WriteStartElement(element); + writer.WriteStartAttribute("type", "http://www.w3.org/2001/XMLSchema-instance"); + writer.WriteQualifiedName(soapType, soapNamespaceUri); + writer.WriteEndAttribute(); + writer.WriteValue(soapValue); + writer.WriteEndElement(); + } + } + + public static Object ObjectFromXmlElement(XmlReader reader) + { + String soapTypeName = reader.GetAttribute("type", "http://www.w3.org/2001/XMLSchema-instance"); + if (!String.IsNullOrEmpty(soapTypeName)) + { + String[] components = soapTypeName.Split(new char[] { ':' }, StringSplitOptions.None); + if (components.Length == 2) + { + soapTypeName = components[1]; +#if DEBUG + String ns = reader.LookupNamespace(components[0]); + if (!String.IsNullOrEmpty(ns) && + !ns.Equals("http://www.w3.org/2001/XMLSchema", StringComparison.OrdinalIgnoreCase) && + !ns.Equals("http://microsoft.com/wsdl/types/", StringComparison.OrdinalIgnoreCase)) + { + Debug.Fail("Unknown namespace encountered for object type " + ns); + reader.ReadOuterXml(); + return null; + } +#endif + } + + switch (soapTypeName) + { + case "base64Binary": + String str = StringFromXmlElement(reader); + if (str != null) + { + return Convert.FromBase64String(str); + } + return ZeroLengthArrayOfByte; + case "boolean": + return XmlConvert.ToBoolean(StringFromXmlElement(reader)); + case "char": + return (Char)XmlConvert.ToInt16(StringFromXmlElement(reader)); // Char goes over the wire as short + case "dateTime": + return ToDateTime(StringFromXmlElement(reader)); + case "decimal": + return XmlConvert.ToDecimal(StringFromXmlElement(reader)); + case "double": + return XmlConvert.ToDouble(StringFromXmlElement(reader)); + case "float": + return XmlConvert.ToSingle(StringFromXmlElement(reader)); + case "int": + return XmlConvert.ToInt32(StringFromXmlElement(reader)); + case "guid": + return XmlConvert.ToGuid(StringFromXmlElement(reader)); + case "long": + return XmlConvert.ToInt64(StringFromXmlElement(reader)); + case "short": + return XmlConvert.ToInt16(StringFromXmlElement(reader)); + case "string": + return StringFromXmlElement(reader); + case "unsignedByte": + return XmlConvert.ToByte(StringFromXmlElement(reader)); + case "ArrayOfAnyType": + return ArrayOfObjectFromXml(reader); + default: + Debug.Fail("Unknown object type encountered " + soapTypeName); + throw new ArgumentException(CommonResources.UnknownTypeForSerialization(soapTypeName)); + } + } + else if (reader.GetAttribute("nil", "http://www.w3.org/2001/XMLSchema-instance") == "true") + { + reader.ReadInnerXml(); + return null; + } + + return null; + } + + public static void ToXml(XmlWriter writer, String element, Object[] array) + { + if (array == null || array.Length == 0) + { + return; + } + + if (!String.IsNullOrEmpty(element)) + { + writer.WriteStartElement(element); + } + + for (int i = 0; i < array.Length; i++) + { + if (array[i] == null) + { + throw new ArgumentNullException("array[" + i + "]"); + } + ObjectToXmlElement(writer, "anyType", array[i]); + } + + if (!String.IsNullOrEmpty(element)) + { + writer.WriteEndElement(); + } + } + + public static Object[] ArrayOfObjectFromXml(XmlReader reader) + { + List list = new List(); + bool empty = reader.IsEmptyElement; + Debug.Assert(reader.NodeType == XmlNodeType.Element, "Expected a node."); + reader.Read(); + if (!empty) + { + while (reader.NodeType == XmlNodeType.Element) + { + if (reader.HasAttributes && + reader.GetAttribute("nil", 
"http://www.w3.org/2001/XMLSchema-instance") == "true") + { + list.Add(null); + reader.Read(); + } + else + { + list.Add(ObjectFromXmlElement(reader)); + } + } + reader.ReadEndElement(); + } + return list.ToArray(); + } + + public static void ToXmlElement(XmlWriter writer, String elementName, XmlNode node) + { + if (node == null) + { + return; + } + + writer.WriteStartElement(elementName); + node.WriteTo(writer); + writer.WriteEndElement(); + } + + public static XmlNode XmlNodeFromXmlElement(XmlReader reader) + { + // Advance the reader so we are at the contents of the node rather than + // starting at the root node. Typically the root node will be the name of + // a property, parameter, or result, and we should not include this in the + // resulting XML. + reader.Read(); + + XmlDocument document = new XmlDocument + { + PreserveWhitespace = false + }; + document.Load(reader); + + // Call Normalize to ensure that we don't create a whole bunch of additional XmlText elements + // for blobs of, for example, HTML content. + document.Normalize(); + + reader.ReadEndElement(); + return document.DocumentElement; + } + + public static DateTime DateFromXmlAttribute(XmlReader reader) + { + return ToDateOnly(StringFromXmlAttribute(reader)); + } + + public static DateTime DateFromXmlElement(XmlReader reader) + { + return ToDateOnly(StringFromXmlElement(reader)); + } + + public static void DateToXmlAttribute(XmlWriter writer, String name, DateTime value) + { + StringToXmlAttribute(writer, name, ToStringDateOnly(value)); + } + + public static void DateToXmlElement(XmlWriter writer, String name, DateTime value) + { + StringToXmlElement(writer, name, ToStringDateOnly(value)); + } + + public static Boolean BooleanFromXmlAttribute(XmlReader reader) + { + return XmlConvert.ToBoolean(StringFromXmlAttribute(reader)); + } + + public static DateTime DateTimeFromXmlAttribute(XmlReader reader) + { + return ToDateTime(StringFromXmlAttribute(reader)); + } + + public static DateTime DateTimeFromXmlElement(XmlReader reader) + { + return ToDateTime(StringFromXmlElement(reader)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, DateTime value) + { + StringToXmlAttribute(writer, name, ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String name, DateTime value) + { + StringToXmlElement(writer, name, ToString(value)); + } + + public static void ToXml(XmlWriter writer, String element, byte[] array) + { + // Omit zero-length arrays to save bandwidth. + if (array == null || array.Length == 0) + { + return; + } + writer.WriteElementString(element, Convert.ToBase64String(array, 0, array.Length)); + } + + public static void ToXmlAttribute(XmlWriter writer, String attr, byte[] array) + { + // Omit zero-length arrays to save bandwidth. 
+            if (array == null || array.Length == 0)
+            {
+                return;
+            }
+            writer.WriteAttributeString(attr, Convert.ToBase64String(array, 0, array.Length));
+        }
+
+        private static XmlReaderSettings s_safeSettings;
+
+        public static String ToString(Uri uri)
+        {
+            return uri.AbsoluteUri;
+        }
+
+        public static Uri ToUri(String s)
+        {
+            if (String.IsNullOrEmpty(s))
+            {
+                return null;
+            }
+            else
+            {
+                return new Uri(s);
+            }
+        }
+
+        public static T EnumFromXmlText<T>(XmlReader reader)
+        {
+            String s = StringFromXmlText(reader);
+            s = s.Replace(' ', ',');
+            return (T)Enum.Parse(typeof(T), s, true);
+        }
+
+        public static void EnumToXmlText<T>(XmlWriter writer, String ignored, T value)
+        {
+            String s = Enum.Format(typeof(T), value, "G");
+            s = s.Replace(",", "");
+            writer.WriteString(s);
+        }
+
+        public static void EnumToXmlAttribute<T>(XmlWriter writer, String attr, T value)
+        {
+            String s = Enum.Format(typeof(T), value, "G");
+            s = s.Replace(",", "");
+            writer.WriteAttributeString(attr, s);
+        }
+
+        public static T EnumFromXmlAttribute<T>(XmlReader reader)
+        {
+            String s = StringFromXmlAttribute(reader);
+            s = s.Replace(' ', ',');
+            return (T)Enum.Parse(typeof(T), s, true);
+        }
+
+        public static void EnumToXmlElement<T>(XmlWriter writer, String element, T value)
+        {
+            String s = Enum.Format(typeof(T), value, "G");
+            s = s.Replace(",", "");
+            writer.WriteElementString(element, s);
+        }
+
+        public static T EnumFromXmlElement<T>(XmlReader reader)
+        {
+            String s = StringFromXmlElement(reader);
+            s = s.Replace(' ', ',');
+            return (T)Enum.Parse(typeof(T), s, true);
+        }
+
+        public static T[] ArrayOfObjectFromXml<T>(
+            XmlReader reader,
+            String arrayElementName,
+            Boolean inline,
+            Func<XmlReader, T> objectFromXmlElement)
+        {
+            return ArrayOfObjectFromXml(null, reader, arrayElementName, inline, (x, y) => objectFromXmlElement(y));
+        }
+
+        public static T[] ArrayOfObjectFromXml<T>(
+            IServiceProvider serviceProvider,
+            XmlReader reader,
+            String arrayElementName,
+            Boolean inline,
+            Func<IServiceProvider, XmlReader, T> objectFromXmlElement)
+        {
+            List<T> list = new List<T>();
+            bool empty = reader.IsEmptyElement;
+            Debug.Assert(reader.NodeType == XmlNodeType.Element, "Expected a node.");
+
+            if (!inline)
+            {
+                reader.Read();
+            }
+
+            if (!empty)
+            {
+                while (reader.NodeType == XmlNodeType.Element && (!inline || reader.Name == arrayElementName))
+                {
+                    if (reader.HasAttributes && reader.GetAttribute("nil", "http://www.w3.org/2001/XMLSchema-instance") == "true")
+                    {
+                        list.Add(default(T));
+                        reader.Read();
+                    }
+                    else
+                    {
+                        list.Add(objectFromXmlElement(serviceProvider, reader));
+                    }
+                }
+                reader.ReadEndElement();
+            }
+            return list.ToArray();
+        }
+
+        ///
+        /// Writes an array of objects to xml using the provided callback function to serialize individual objects
+        /// within the array.
+ /// + /// The type of objects contained in the array + /// The xml writer for serialization + /// The array to be serialized + /// The name of the array root element + /// The name of the array elements + /// True if the array elements should be written inline, or false to create the root node + /// True if an empty array should be serialized, false to omit empty arrays + /// A callback function for serializing an individual array element + public static void ArrayOfObjectToXml( + XmlWriter writer, + T[] array, + String arrayName, + String arrayElementName, + Boolean inline, + Boolean allowEmptyArrays, + Action objectToXmlElement) + { + if (array == null) + { + return; + } + + if (array.Length == 0) + { + if (allowEmptyArrays && !String.IsNullOrEmpty(arrayName)) + { + writer.WriteStartElement(arrayName); + writer.WriteEndElement(); + } + return; + } + + if (!inline) + { + writer.WriteStartElement(arrayName); + + for (Int32 i = 0; i < array.Length; i = i + 1) + { + if (array[i] == null) + { + writer.WriteStartElement(arrayElementName); + writer.WriteAttributeString("nil", "http://www.w3.org/2001/XMLSchema-instance", "true"); + writer.WriteEndElement(); + } + else + { + objectToXmlElement(writer, arrayElementName, array[i]); + } + } + writer.WriteEndElement(); + } + else + { + for (Int32 i = 0; i < array.Length; i = i + 1) + { + if (array[i] == null) + { + writer.WriteStartElement(arrayElementName); + writer.WriteAttributeString("nil", "http://www.w3.org/2001/XMLSchema-instance", "true"); + writer.WriteEndElement(); + } + else + { + objectToXmlElement(writer, arrayElementName, array[i]); + } + } + } + } + + /// + /// Writes an System.Collections.Generic.IEnumerable<T> of objects to xml using the provided + /// callback function to serialize individual objects. 
+ /// + /// The type of objects contained in the array + /// The xml writer for serialization + /// The array to be serialized + /// The name of the array root element + /// The name of the array elements + /// True if the array elements should be written inline, or false to create the root node + /// True if an empty array should be serialized, false to omit empty arrays + /// A callback function for serializing an individual array element + public static void EnumerableOfObjectToXml( + XmlWriter writer, + IEnumerable enumerable, + String arrayName, + String arrayElementName, + Boolean inline, + Boolean allowEmptyArrays, + Action objectToXmlElement) + { + // Optionally omit zero-length enumerables to save bandwidth + if (enumerable == null) + { + return; + } + + if (!enumerable.Any()) + { + if (allowEmptyArrays && !String.IsNullOrEmpty(arrayName)) + { + writer.WriteStartElement(arrayName); + writer.WriteEndElement(); + } + return; + } + + if (!inline) + { + writer.WriteStartElement(arrayName); + + foreach (T item in enumerable) + { + if (item == null) + { + writer.WriteStartElement(arrayElementName); + writer.WriteAttributeString("nil", "http://www.w3.org/2001/XMLSchema-instance", "true"); + writer.WriteEndElement(); + } + else + { + objectToXmlElement(writer, arrayElementName, item); + } + } + writer.WriteEndElement(); + } + else + { + foreach (T item in enumerable) + { + if (item == null) + { + writer.WriteStartElement(arrayElementName); + writer.WriteAttributeString("nil", "http://www.w3.org/2001/XMLSchema-instance", "true"); + writer.WriteEndElement(); + } + else + { + objectToXmlElement(writer, arrayElementName, item); + } + } + } + } + + public static Boolean BooleanFromXmlElement(XmlReader reader) + { + return XmlConvert.ToBoolean(StringFromXmlElement(reader)); + } + + public static Byte ByteFromXmlAttribute(XmlReader reader) + { + return XmlConvert.ToByte(StringFromXmlAttribute(reader)); + } + + public static Byte ByteFromXmlElement(XmlReader reader) + { + return XmlConvert.ToByte(StringFromXmlElement(reader)); + } + + public static Char CharFromXmlAttribute(XmlReader reader) + { + return (Char)XmlConvert.ToInt32(StringFromXmlAttribute(reader)); + } + + public static Char CharFromXmlElement(XmlReader reader) + { + return (Char)XmlConvert.ToInt32(StringFromXmlElement(reader)); + } + + public static Double DoubleFromXmlAttribute(XmlReader reader) + { + return XmlConvert.ToDouble(StringFromXmlAttribute(reader)); + } + + public static Double DoubleFromXmlElement(XmlReader reader) + { + return XmlConvert.ToDouble(StringFromXmlElement(reader)); + } + + public static Guid GuidFromXmlAttribute(XmlReader reader) + { + return XmlConvert.ToGuid(StringFromXmlAttribute(reader)); + } + + public static Guid GuidFromXmlElement(XmlReader reader) + { + return XmlConvert.ToGuid(StringFromXmlElement(reader)); + } + + public static Int16 Int16FromXmlAttribute(XmlReader reader) + { + return XmlConvert.ToInt16(StringFromXmlAttribute(reader)); + } + + public static Int16 Int16FromXmlElement(XmlReader reader) + { + return XmlConvert.ToInt16(StringFromXmlElement(reader)); + } + + public static Int32 Int32FromXmlAttribute(XmlReader reader) + { + return XmlConvert.ToInt32(StringFromXmlAttribute(reader)); + } + + public static Int32 Int32FromXmlElement(XmlReader reader) + { + return XmlConvert.ToInt32(StringFromXmlElement(reader)); + } + + public static Int64 Int64FromXmlAttribute(XmlReader reader) + { + return XmlConvert.ToInt64(StringFromXmlAttribute(reader)); + } + + public static Int64 
Int64FromXmlElement(XmlReader reader) + { + return XmlConvert.ToInt64(StringFromXmlElement(reader)); + } + + public static Single SingleFromXmlAttribute(XmlReader reader) + { + return XmlConvert.ToSingle(StringFromXmlAttribute(reader)); + } + + public static Single SingleFromXmlElement(XmlReader reader) + { + return XmlConvert.ToSingle(StringFromXmlElement(reader)); + } + + public static String StringFromXmlAttribute(XmlReader reader) + { + return GetCachedString(reader.Value); + } + + public static String StringFromXmlElement(XmlReader reader) + { + String str = String.Empty; + Boolean isEmpty = reader.IsEmptyElement; + Debug.Assert(reader.NodeType == XmlNodeType.Element, "Expected a node."); + + reader.Read(); + if (!isEmpty) + { + // We don't expect the server to send back a CDATA section, but the client OM + // may use the FromXml methods to read a hand-edited xml file. + if (reader.NodeType == XmlNodeType.CDATA || + reader.NodeType == XmlNodeType.Text || + reader.NodeType == XmlNodeType.Whitespace) + { + str = GetCachedString(reader.ReadContentAsString().Replace("\n", "\r\n")); + reader.ReadEndElement(); + } + else if (reader.NodeType == XmlNodeType.EndElement) + { + // in the case where the element is empty/whitespace such as , we need to read past the end element + reader.ReadEndElement(); + } + } + + return str; + } + + public static String StringFromXmlText(XmlReader reader) + { + String str = String.Empty; + if (reader.NodeType == XmlNodeType.CDATA || + reader.NodeType == XmlNodeType.Text || + reader.NodeType == XmlNodeType.Whitespace) + { + str = GetCachedString(reader.ReadContentAsString().Replace("\n", "\r\n")); + } + return str; + } + + public static TimeSpan TimeSpanFromXmlAttribute(XmlReader reader) + { + return XmlConvert.ToTimeSpan(StringFromXmlAttribute(reader)); + } + + public static TimeSpan TimeSpanFromXmlElement(XmlReader reader) + { + return XmlConvert.ToTimeSpan(StringFromXmlElement(reader)); + } + + public static UInt16 UInt16FromXmlAttribute(XmlReader reader) + { + return XmlConvert.ToUInt16(StringFromXmlAttribute(reader)); + } + + public static UInt16 UInt16FromXmlElement(XmlReader reader) + { + return XmlConvert.ToUInt16(StringFromXmlElement(reader)); + } + + public static UInt32 UInt32FromXmlAttribute(XmlReader reader) + { + return XmlConvert.ToUInt32(StringFromXmlAttribute(reader)); + } + + public static UInt32 UInt32FromXmlElement(XmlReader reader) + { + return XmlConvert.ToUInt32(StringFromXmlElement(reader)); + } + + public static UInt64 UInt64FromXmlAttribute(XmlReader reader) + { + return XmlConvert.ToUInt64(StringFromXmlAttribute(reader)); + } + + public static UInt64 UInt64FromXmlElement(XmlReader reader) + { + return XmlConvert.ToUInt64(StringFromXmlElement(reader)); + } + + public static Uri UriFromXmlAttribute(XmlReader reader) + { + return ToUri(StringFromXmlAttribute(reader)); + } + + public static Uri UriFromXmlElement(XmlReader reader) + { + return ToUri(StringFromXmlElement(reader)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, Boolean value) + { + StringToXmlAttribute(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, Byte value) + { + StringToXmlAttribute(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, Char value) + { + StringToXmlAttribute(writer, name, XmlConvert.ToString((Int32)value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, Double value) + { + 
StringToXmlAttribute(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, Guid value) + { + StringToXmlAttribute(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, Int16 value) + { + StringToXmlAttribute(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, Int32 value) + { + StringToXmlAttribute(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, Int64 value) + { + StringToXmlAttribute(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, Single value) + { + StringToXmlAttribute(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, String value) + { + StringToXmlAttribute(writer, name, value); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, TimeSpan value) + { + StringToXmlAttribute(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, UInt16 value) + { + StringToXmlAttribute(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, UInt32 value) + { + StringToXmlAttribute(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, UInt64 value) + { + StringToXmlAttribute(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlAttribute(XmlWriter writer, String name, Uri value) + { + StringToXmlAttribute(writer, name, ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String name, Boolean value) + { + StringToXmlElement(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String name, Byte value) + { + StringToXmlElement(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String name, Char value) + { + StringToXmlElement(writer, name, XmlConvert.ToString((Int32)value)); + } + + public static void ToXmlElement(XmlWriter writer, String name, Double value) + { + StringToXmlElement(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String name, Guid value) + { + StringToXmlElement(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String element, Int16 value) + { + StringToXmlElement(writer, element, XmlConvert.ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String element, Int32 value) + { + StringToXmlElement(writer, element, XmlConvert.ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String element, Int64 value) + { + StringToXmlElement(writer, element, XmlConvert.ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String name, Single value) + { + StringToXmlElement(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String name, String value) + { + StringToXmlElement(writer, name, value); + } + + public static void ToXmlElement(XmlWriter writer, String name, TimeSpan value) + { + StringToXmlElement(writer, name, XmlConvert.ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String element, UInt16 value) + { + StringToXmlElement(writer, 
element, XmlConvert.ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String element, UInt32 value) + { + StringToXmlElement(writer, element, XmlConvert.ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String element, UInt64 value) + { + StringToXmlElement(writer, element, XmlConvert.ToString(value)); + } + + public static void ToXmlElement(XmlWriter writer, String name, Uri value) + { + StringToXmlElement(writer, name, ToString(value)); + } + + public static void StringToXmlAttribute(XmlWriter writer, String name, String value) + { + writer.WriteAttributeString(name, value); + } + + public static void StringToXmlElement(XmlWriter writer, String name, String value) + { + try + { + writer.WriteElementString(name, value); + } + catch (ArgumentException e) + { + Debug.Assert(e.Message.IndexOf("invalid character", StringComparison.OrdinalIgnoreCase) > 0, "Unexpected exception: " + e.ToString()); + throw new VssServiceException(CommonResources.StringContainsIllegalChars(), e); + } + } + + public static void StringToXmlText(XmlWriter writer, String str) + { + if (str == null) + { + return; + } + + try + { + writer.WriteString(str); + } + catch (ArgumentException e) + { + Debug.Assert(e.Message.IndexOf("invalid character", StringComparison.OrdinalIgnoreCase) > 0, "Unexpected exception: " + e.ToString()); + throw new VssServiceException(CommonResources.StringContainsIllegalChars(), e); + } + } + + public static byte[] ArrayOfByteFromXml(XmlReader reader) + { + String str = StringFromXmlElement(reader); + if (str != null) + { + return Convert.FromBase64String(str); + } + return ZeroLengthArrayOfByte; + } + + public static byte[] ArrayOfByteFromXmlAttribute(XmlReader reader) + { + if (reader.Value.Length != 0) + { + return Convert.FromBase64String(reader.Value); + } + return ZeroLengthArrayOfByte; + } + + public static byte[] ZeroLengthArrayOfByte + { + get + { + if (s_zeroLengthArrayOfByte == null) + { + s_zeroLengthArrayOfByte = new byte[0]; + } + return s_zeroLengthArrayOfByte; + } + } + + public static bool CompareXmlDocuments(string xml1, string xml2) + { + if (xml1 == xml2) + { + return true; + } + else if (string.IsNullOrEmpty(xml1) || string.IsNullOrEmpty(xml2)) + { + return false; + } + + XDocument x1 = XDocument.Parse(xml1); + XDocument x2 = XDocument.Parse(xml2); + + return Compare(x1?.Root, x2?.Root); + } + + private static bool Compare(XContainer x1, XContainer x2) + { + if (object.ReferenceEquals(x1, x2)) + { + return true; + } + + XElement e1 = x1 as XElement; + XElement e2 = x2 as XElement; + + if (e1 != null && e2 != null) + { + if (!VssStringComparer.XmlNodeName.Equals(e1.Name.ToString(), e2.Name.ToString()) || + !e1.Attributes().OrderBy(a => a.Name.ToString()).SequenceEqual(e2.Attributes().OrderBy(a => a.Name.ToString()), s_xmlAttributeComparer) || + !VssStringComparer.XmlElement.Equals(e1.Value, e2.Value)) + { + return false; + } + + return x1.Elements().OrderBy(xe => xe.Name.ToString()).SequenceEqual(x2.Elements().OrderBy(xe => xe.Name.ToString()), s_xmlElementComparer); + } + + return false; + } + + #region GetCachedString + + /// + /// Strings are often duplicated in the XML returned by the server. To + /// reduce the number of identical String instances, we keep a small + /// cache of the last N strings to be deserialized off the wire. + /// + /// Send your deserialized strings through this method. 
If they match a + /// recently deserialized string, the cached value will be returned and + /// your deserialized string will be left in Gen0 for easy collection. + /// + private static String GetCachedString(String fromXml) + { + if (null == fromXml) + { + return null; + } + + int fromXmlLength = fromXml.Length; + + // Don't cache large strings. They take a lot longer to compare. + if (fromXmlLength > 256) + { + return fromXml; + } + + if (fromXmlLength == 0) + { + return String.Empty; + } + + String[] stringList = ts_stringList; + + // Set up the thread-static data structures if they have not yet + // been initialized. + if (null == stringList) + { + stringList = new String[c_stringCacheSize]; + ts_stringList = stringList; + } + + // Check for a cache hit. + for (int i = 0; i < c_stringCacheSize; i++) + { + String cachedString = stringList[i]; + + if (null == cachedString) + { + break; + } + + // If the lengths or first characters are different, this + // is not a hit. + if (cachedString.Length != fromXmlLength || + fromXml[0] != cachedString[0]) + { + continue; + } + + // If the strings are 6 characters or longer, check the character + // 5 characters from the end. Remember at this point we know the + // strings are identical in length. + if (fromXmlLength > 5 && + fromXml[fromXmlLength - 5] != cachedString[fromXmlLength - 5]) + { + continue; + } + + // OK, looks like a potential hit, let's verify with String.Equals. + if (String.Equals(fromXml, cachedString, StringComparison.Ordinal)) + { + // This is a cache hit. Move it to the 0 position and shove + // everything else down. + for (int j = i - 1; j >= 0; j--) + { + stringList[j + 1] = stringList[j]; + } + + stringList[0] = cachedString; + + return cachedString; + } + } + + // This is a cache miss. Evict the nth position, move everything else + // down, and insert this at the 0 position. + for (int i = c_stringCacheSize - 2; i >= 0; i--) + { + stringList[i + 1] = stringList[i]; + } + + stringList[0] = fromXml; + + return fromXml; + } + + [ThreadStatic] + private static String[] ts_stringList; + + // Size of the cache. Larger values mean more memory savings + // but more time spent in GetCachedString. + private const int c_stringCacheSize = 16; + + #endregion GetCachedString + + private class AttributeComparer : IEqualityComparer + { + public bool Equals(XAttribute x, XAttribute y) + { + if (x == y) + { + return true; + } + + if (x == null || y == null) + { + return false; + } + + return VssStringComparer.XmlAttributeName.Equals(x.Name.ToString(), y.Name.ToString()) && + VssStringComparer.XmlAttributeValue.Equals(x.Value, y.Value); + } + + public int GetHashCode(XAttribute obj) + { + if (obj == null) + { + return 0; + } + return obj.GetHashCode(); + } + } + + private class ElementComparer : IEqualityComparer + { + public bool Equals(XElement x, XElement y) + { + if (x == y) + { + return true; + } + if (x == null || y == null) + { + return false; + } + return XmlUtility.Compare(x, y); + } + + public int GetHashCode(XElement obj) + { + if (obj == null) + { + return 0; + } + return obj.GetHashCode(); + } + } + + private static byte[] s_zeroLengthArrayOfByte; + private static readonly AttributeComparer s_xmlAttributeComparer = new AttributeComparer(); + private static readonly ElementComparer s_xmlElementComparer = new ElementComparer(); + } + + /// + /// XML element writer class that automatically makes the closing WriteEndElement call + /// during dispose. 
+ /// + [EditorBrowsable(EditorBrowsableState.Never)] + public class XmlElementWriterUtility : IDisposable + { + private XmlWriter m_xmlWriter; + + /// + /// Constructor. + /// + public XmlElementWriterUtility(string elementName, XmlWriter xmlWriter) + { + m_xmlWriter = xmlWriter; + m_xmlWriter.WriteStartElement(elementName); + } + + /// + /// Dispose. + /// + public void Dispose() + { + m_xmlWriter.WriteEndElement(); + } + } +} diff --git a/src/Sdk/Common/Common/VssCommonConstants.cs b/src/Sdk/Common/Common/VssCommonConstants.cs new file mode 100644 index 00000000000..7de2e3fb94d --- /dev/null +++ b/src/Sdk/Common/Common/VssCommonConstants.cs @@ -0,0 +1,1054 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics.CodeAnalysis; + +namespace GitHub.Services.Common +{ + public static class AdminConstants + { + /// + /// Each incoming web request is assigned a server process id, this constant defines + /// an element within the Context.Items[] to hold that value. + /// + public const String ServerProcessID = "serverProcessID"; + public const String ApplicationName = "ApplicationName"; + } + + [GenerateSpecificConstants] + public static class IdentityConstants + { + static IdentityConstants() + { + // For the normalization of incoming IdentityType strings. + // This is an optimization; it is not required that any particular IdentityType values + // appear in this list, but it helps performance to have common values here + var identityTypeMap = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + { IdentityConstants.WindowsType, IdentityConstants.WindowsType }, + { IdentityConstants.TeamFoundationType, IdentityConstants.TeamFoundationType }, + { IdentityConstants.ClaimsType, IdentityConstants.ClaimsType }, + { IdentityConstants.BindPendingIdentityType, IdentityConstants.BindPendingIdentityType }, + { IdentityConstants.UnauthenticatedIdentityType, IdentityConstants.UnauthenticatedIdentityType }, + { IdentityConstants.ServiceIdentityType, IdentityConstants.ServiceIdentityType }, + { IdentityConstants.AggregateIdentityType, IdentityConstants.AggregateIdentityType }, + { IdentityConstants.ServerTestIdentity, IdentityConstants.ServerTestIdentity }, + { IdentityConstants.ImportedIdentityType, IdentityConstants.ImportedIdentityType }, + { IdentityConstants.GroupScopeType, IdentityConstants.GroupScopeType }, + { IdentityConstants.CspPartnerIdentityType, IdentityConstants.CspPartnerIdentityType }, + { IdentityConstants.System_ServicePrincipal, IdentityConstants.System_ServicePrincipal }, + { IdentityConstants.System_License, IdentityConstants.System_License }, + { IdentityConstants.System_Scope, IdentityConstants.System_Scope }, + { IdentityConstants.PermissionLevelDefinitionType, IdentityConstants.PermissionLevelDefinitionType} + }; + + IdentityTypeMap = identityTypeMap; + } + + public const string WindowsType = "System.Security.Principal.WindowsIdentity"; // hard coding to make PCL compliant. 
typeof(WindowsIdentity).FullName + public const string TeamFoundationType = "GitHub.Identity"; + public const string ClaimsType = "Microsoft.IdentityModel.Claims.ClaimsIdentity"; + // In WIF 4.5, Microsoft.IdentityModel.Claims.ClaimsIdentity was moved to System.Security.Claims namespace + [EditorBrowsable(EditorBrowsableState.Never)] + public const string Wif45ClaimsIdentityType = "System.Security.Claims.ClaimsIdentity"; + public const string AlternateLoginType = "GitHub.Services.Cloud.AlternateLoginIdentity"; + public const string BindPendingIdentityType = "GitHub.BindPendingIdentity"; + public const string ServerTestIdentity = "GitHub.Services.Identity.ServerTestIdentity"; + public const string UnauthenticatedIdentityType = "GitHub.UnauthenticatedIdentity"; + public const string ServiceIdentityType = "GitHub.ServiceIdentity"; + public const string AggregateIdentityType = "GitHub.AggregateIdentity"; + public const string ImportedIdentityType = "GitHub.ImportedIdentity"; + public const string UnknownIdentityType = "GitHub.Services.Identity.UnknownIdentity"; + public const string CspPartnerIdentityType = "GitHub.Claims.CspPartnerIdentity"; + public const string PermissionLevelDefinitionType = "GitHub.Services.PermissionLevel.PermissionLevelIdentity"; + + // this is used to represent scopes in the new Graph Rest Api + public const string GroupScopeType = "GitHub.Services.Graph.GraphScope"; + + // These are used with the System Subject Store + public const string SystemPrefix = "System:"; + public const string System_ServicePrincipal = SystemPrefix + "ServicePrincipal"; + public const string System_WellKnownGroup = SystemPrefix + "WellKnownGroup"; + public const string System_License = SystemPrefix + "License"; + public const string System_Scope = SystemPrefix + "Scope"; + public const string System_CspPartner = SystemPrefix + "CspPartner"; + public const string System_PublicAccess = SystemPrefix + "PublicAccess"; + + // This is used to convey an ACE via an IdentityDescriptor + public const string System_AccessControl = SystemPrefix + "AccessControl"; + + public const int MaxIdLength = 256; + public const int MaxTypeLength = 128; + public const byte UnknownIdentityTypeId = byte.MaxValue; + + // Social type for identity + public const byte UnknownSocialTypeId = byte.MaxValue; + + /// + /// Special value for the unique user ID for active (non-deleted) users. + /// + public const int ActiveUniqueId = 0; + + /// + /// Value of attribute that denotes whether user or group. 
+ /// + public const string SchemaClassGroup = "Group"; + public const string SchemaClassUser = "User"; + + public const string BindPendingSidPrefix = "upn:"; + [GenerateConstant] + public const string MsaDomain = "Windows Live ID"; + [GenerateConstant] + public const string GitHubDomain = "github.com"; + public const string DomainQualifiedAccountNameFormat = "{0}\\{1}"; + public const string MsaSidSuffix = "@Live.com"; + public const string AadOidPrefix = "oid:"; + public const string FrameworkIdentityIdentifierDelimiter = ":"; + public const string IdentityDescriptorPartsSeparator = ";"; + public const string IdentityMinimumResourceVersion = "IdentityMinimumResourceVersion"; + public const int DefaultMinimumResourceVersion = -1; + public const char DomainAccountNameSeparator = '\\'; + public const bool DefaultUseAccountNameAsDirectoryAlias = true; + + /// + /// Values used in switch_hint query parameter to force sign in with personal or work account + /// + public const string SwitchHintQueryKey = "switch_hint"; + public const char SwitchToPersonal = 'P'; + public const char SwitchToWork = 'W'; + + public const string AllowNonServiceIdentitiesInDeploymentAdminsGroup = + nameof(AllowNonServiceIdentitiesInDeploymentAdminsGroup); + + /// + /// The DB layer only supports byte, even though the data layer contracts suggests a + /// 32-bit integer. Note: changing this constant implies that every new identity object + /// that is created, going forward will have this resource version set. Existing identites + /// will need to be updated to the current resource version level manually. + /// + /// This is created for rolling out of a feature based on identity not service host. + /// This value must be greater than 0. Otherwise, IMS won't update tbl_identityextension for + /// identity extended properties. + /// + public const byte DefaultResourceVersion = 2; + + // Identity ResourceVersions + [Obsolete] + public const byte ScopeManifestIssuance = 2; + [Obsolete] + public const byte ScopeManifestEnforcementWithInitialGrace = 3; + [Obsolete] + public const byte ScopeManifestEnforcementWithoutInitialGrace = 4; + + /// + /// The Global scope, [SERVER], represents the highest group Scope ID in the given request context. + /// For example, [SERVER] at a Deployment context would represent the deployment Scope ID. When + /// using the global scope in a search, a search for [SERVER]\Team Foundation Administrators + /// at the deployment level would return the deployment administrators group, while the same call + /// at the Application host level would return the Account Administrators group. The search will + /// not recurse down into sub-scopes. + /// + /// [SERVER] is a deprecated concept, introduced before TFS 2010. We recommend using either the + /// collection name in square brackets (i.e. [DefaultCollection] or the scope ID in square brackets + /// (i.e. [SCOPE_GUID]) instead. 
+ /// + public const string GlobalScope = "[SERVER]"; + + public static readonly Guid LinkedId = new Guid("FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF"); + + public static class EtwIdentityProviderName + { + public const string Aad = nameof(Aad); + public const string Msa = nameof(Msa); + public const string Vsts = nameof(Vsts); + } + + public static class EtwIdentityCategory + { + public const string AuthenticatedIdentity = nameof(AuthenticatedIdentity); + public const string UnauthenticatedIdentity = nameof(UnauthenticatedIdentity); + public const string ServiceIdentity = nameof(ServiceIdentity); + public const string UnexpectedIdentityType = nameof(UnexpectedIdentityType); + } + + public static readonly IReadOnlyDictionary IdentityTypeMap; + } + + /// + /// Common attributes tags used in the collection of properties of TeamFoundationIdentity. + /// + public static class IdentityAttributeTags + { + public const string WildCard = "*"; + + public const string AccountName = "Account"; + public const string Alias = "Alias"; + public const string CrossProject = "CrossProject"; + public const string Description = "Description"; + public const string Disambiguation = "Disambiguation"; + public const string DistinguishedName = "DN"; + public const string Domain = "Domain"; + public const string GlobalScope = "GlobalScope"; + public const string MailAddress = "Mail"; + public const string RestrictedVisible = "RestrictedVisible"; + public const string SchemaClassName = "SchemaClassName"; + public const string ScopeName = "ScopeName"; + public const string SecurityGroup = "SecurityGroup"; + public const string SpecialType = "SpecialType"; + public const string ScopeId = "ScopeId"; + public const string ScopeType = "ScopeType"; + public const string LocalScopeId = "LocalScopeId"; + public const string SecuringHostId = "SecuringHostId"; + public const string VirtualPlugin = "VirtualPlugin"; + public const string ProviderDisplayName = "ProviderDisplayName"; + public const string IsGroupDeleted = "IsGroupDeleted"; + + public const string Cuid = "CUID"; + public const string CuidState = "CUIDState"; + public const string Puid = "PUID"; + public const string Oid = "http://schemas.microsoft.com/identity/claims/objectidentifier"; + public const string ConsumerPuid = "ConsumerPUID"; + public const string ComplianceValidated = "ComplianceValidated"; + public const string AuthenticationCredentialValidFrom = "AuthenticationCredentialValidFrom"; + public const string MetadataUpdateDate = "MetadataUpdateDate"; + public const string DirectoryAlias = "DirectoryAlias"; + public const string CacheMaxAge = "CacheMaxAge"; + // temporary used in the ServiceIdentity and CspIdentity + public const string ServiceStorageKey = "http://schemas.microsoft.com/ws/2008/06/identity/claims/primarysid"; + public const string ProvData = "prov_data"; + + public const string AadRefreshToken = "vss:AadRefreshToken"; + public const string AadRefreshTokenUpdated = "GitHub.Aad.AadRefreshTokenUpdateDate"; + public const string AadUserPrincipalName = "AadUserPrincipalName"; + public const string AcsIdentityProvider = "http://schemas.microsoft.com/accesscontrolservice/2010/07/claims/identityprovider"; + public const string AadIdentityProvider = "http://schemas.microsoft.com/identity/claims/identityprovider"; + public const string IdentityProviderClaim = "http://schemas.microsoft.com/teamfoundationserver/2010/12/claims/identityprovider"; + public const string NameIdentifierClaim = 
"http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier"; + public const string TenantIdentifierClaim = "http://schemas.microsoft.com/identity/claims/tenantid"; + public const string AadTenantDisambiguationClaim = "tenant_disambiguate"; + public const string AadMsaPassthroughClaim = "msapt"; + public const string AppidClaim = "appid"; + + public const string IdentityTypeClaim = "IdentityTypeClaim"; + public const string IsClientClaim = "IsClient"; + + // tbl_IdentityExtension properties. No longer stored in PropertyService + public const string ConfirmedNotificationAddress = "ConfirmedNotificationAddress"; + public const string CustomNotificationAddresses = "CustomNotificationAddresses"; + public const string IsDeletedInOrigin = "IsDeletedInOrigin"; + + // Extended properties, currently used only for Group images + public const string ImageId = "GitHub.Identity.Image.Id"; + public const string ImageData = @"GitHub.Identity.Image.Data"; + public const string ImageType = @"GitHub.Identity.Image.Type"; + public const string ImageUploadDate = @"GitHub.Identity.Image.UploadDate"; + public const string CandidateImageData = @"GitHub.Identity.CandidateImage.Data"; + public const string CandidateImageUploadDate = @"GitHub.Identity.CandidateImage.UploadDate"; + + // Extended Properties used On Prem + public const string LastAccessedTime = "LastAccessedTime"; + + // Extended Property used by Profile to get the MasterId of an identity. + // DO NOT USE without consulting with and getting permission from the + // Identity team. This is a bad pattern that we are currently supporting + // for compat with Profile, and the whole concept of MasterIds may be + // changing with our Sharding work. + public const string UserId = "UserId"; + + // Obsolete extended properties, which should be removed with the next major version (whichever version follows Dev15/TFS 2017) + [Obsolete] public const string EmailConfirmationSendDates = "EmailConfirmationSendDates"; + [Obsolete] public const string MsdnLicense = "MSDNLicense"; + [Obsolete] public const string BasicAuthPwdKey = "GitHub.Identity.BasicAuthPwd"; + [Obsolete] public const string BasicAuthSaltKey = "GitHub.Identity.BasicAuthSalt"; + [Obsolete] public const string BasicAuthAlgorithm = "Microsoft.TeaFoundation.Identity.BasicAuthAlgorithm"; + [Obsolete] public const string BasicAuthFailures = "Microsoft.TeaFoundation.Identity.BasicAuthFailures"; + [Obsolete] public const string BasicAuthDisabled = "Microsoft.TeaFoundation.Identity.BasicAuthDisabled"; + [Obsolete] public const string BasicAuthPasswordChanges = "GitHub.Identity.BasicAuthSettingsChanges"; + + + [SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "When the target .NET framework is revisioned to 4, change return to ISet")] + public static readonly HashSet ReadOnlyProperties = new HashSet( + new[] + { + AccountName, + Alias, + ComplianceValidated, + CrossProject, + Description, + Disambiguation, + DistinguishedName, + Domain, + GlobalScope, + MailAddress, + RestrictedVisible, + SchemaClassName, + ScopeName, + SecurityGroup, + SpecialType, + ScopeId, + ScopeType, + LocalScopeId, + SecuringHostId, + Cuid, + CuidState, + Puid, + VirtualPlugin, + Oid, + AcsIdentityProvider, + AadIdentityProvider, + AadTenantDisambiguationClaim, + AadMsaPassthroughClaim, + IdentityProviderClaim, + NameIdentifierClaim, + IsClientClaim, + UserId, + CacheMaxAge, + IsGroupDeleted, + }, + StringComparer.OrdinalIgnoreCase + ); + + 
[SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes", Justification = "When the target .NET framework is revisioned to 4, change return to ISet")] + public static readonly HashSet GroupReadOnlyProperties = new HashSet( + new[] + { + Alias, + ComplianceValidated, + CrossProject, + Disambiguation, + DistinguishedName, + Domain, + GlobalScope, + MailAddress, + RestrictedVisible, + SchemaClassName, + ScopeName, + SecurityGroup, + SpecialType, + ScopeId, + ScopeType, + LocalScopeId, + SecuringHostId, + Cuid, + CuidState, + Puid, + VirtualPlugin, + Oid, + AcsIdentityProvider, + AadIdentityProvider, + AadTenantDisambiguationClaim, + AadMsaPassthroughClaim, + IdentityProviderClaim, + NameIdentifierClaim, + IsClientClaim, + UserId, + CacheMaxAge, + IsGroupDeleted, + }, + StringComparer.OrdinalIgnoreCase + ); + + [Obsolete] + public static readonly ISet WhiteListedProperties = new HashSet(StringComparer.OrdinalIgnoreCase); + } + + public static class DirectoryRoleConstants + { + /// Name of the directory role that represents "Company Administrator/Global Admin" + public const string CompanyAdministrator = "Company Administrator"; + } + + // Used with Registration entries + [GenerateSpecificConstants] + public static class ToolNames + { + public const string Framework = "Framework"; + [GenerateConstant] + public const string VersionControl = "VersionControl"; + [GenerateConstant] + public const string WorkItemTracking = "WorkItemTracking"; + [GenerateConstant] + public const string RemoteWorkItemTracking = "RemoteWorkItemTracking"; + public const string CoreServices = "vstfs"; + public const string Warehouse = "Reports"; + [GenerateConstant] + public const string TeamBuild = "Build"; + public const string ProxyServer = "ps"; + public const string TeamFoundation = "vstfs"; + public const string SharePoint = "Wss"; + [GenerateConstant] + public const string TestManagement = "TestManagement"; + public const string LabManagement = "LabManagement"; + public const string ReleaseManagement = "ReleaseManagement"; + public const string SyncService = "SyncService"; + public const string TestRig = "TestRig"; + public const string TSWebAccess = "TSWebAccess"; + public const string ProjectServer = "ProjectServer"; + public const string DeploymentRig = "DeploymentRig"; + public const string TeamProjects = "TeamProjects"; // contains specific project registration entries (project portal, process guidance and doc url) + public const string Discussion = "Discussion"; + [GenerateConstant] + public const string Requirements = "Requirements"; + [GenerateConstant] + public const string Hyperlink = "Hyperlink"; + public const string Classification = "Classification"; + [GenerateConstant] + public const string Legacy = "Legacy"; + [GenerateConstant] + public const string CodeSense = "CodeSense"; + [GenerateConstant] + public const string Git = "Git"; + [GenerateConstant] + public const string CodeReview = "CodeReview"; + [GenerateConstant] + public const string ProjectDownload = "ProjectDownload"; + public const string DistributedTask = "DistributedTask"; + [GenerateConstant] + public const string Wiki = "Wiki"; + + public const string Search = "Search"; + [GenerateConstant] + public const string GitHub = "GitHub"; + } + + // Artifact types + [GenerateSpecificConstants] + public static class ArtifactTypeNames + { + public const string Project = "TeamProject"; + public const string Node = "Node"; + public const string Collector = "Collector"; + public const string TestResult = 
"TestResult"; + [GenerateConstant] + public const string TcmResult = "TcmResult"; + [GenerateConstant] + public const string TcmResultAttachment = "TcmResultAttachment"; + [GenerateConstant] + public const string TcmTest = "TcmTest"; + [GenerateConstant] + public const string Build = "Build"; + public const string BuildAgent = "Agent"; + public const string BuildDefinition = "Definition"; + public const string BuildController = "Controller"; + public const string BuildGroup = "Group"; + public const string BuildRequest = "Request"; + public const string BuildServiceHost = "ServiceHost"; + [GenerateConstant] + public const string VersionedItem = "VersionedItem"; + [GenerateConstant] + public const string LatestItemVersion = "LatestItemVersion"; + [GenerateConstant] + public const string Changeset = "Changeset"; + public const string Label = "Label"; + [GenerateConstant] + public const string Shelveset = "Shelveset"; + public const string ShelvedItem = "ShelvedItem"; + [GenerateConstant] + public const string WorkItem = "WorkItem"; + public const string Query = "Query"; + public const string Results = "Results"; + public const string LabEnvironment = "LabEnvironment"; + public const string LabTemplate = "LabTemplate"; + public const string LabSystem = "LabSystem"; + public const string TeamProjectHostGroup = "TeamProjectHostGroup"; + public const string TeamProjectLibraryShare = "TeamProjectLibraryShare"; + public const string TeamProjectCollectionLibraryShare = "TeamProjectCollectionLibraryShare"; + public const string TeamProjectCollectionHostGroup = "TeamProjectCollectionHostGroup"; + public const string TestMachine = "TestMachine"; + [GenerateConstant] + public const string Storyboard = "Storyboard"; + [GenerateConstant] + public const string Commit = "Commit"; + public const string LaunchLatestVersionedItem = "LaunchLatestVersionedItem"; + [GenerateConstant] + public const string CodeReviewId = "CodeReviewId"; + [GenerateConstant] + public const string CodeReviewSdkId = "ReviewId"; + [GenerateConstant] + public const string PullRequestId = "PullRequestId"; + [GenerateConstant] + public const string ProjectDownloadProject = "Project"; + /// + /// A Git Ref + /// + [GenerateConstant] + public const string Ref = "Ref"; + + public const string TaskAgentPoolMaintenance = "PoolMaintenance"; + [GenerateConstant] + public const string WikiPage = "WikiPage"; + + // GitHub + [GenerateConstant] + public const string PullRequest = "PullRequest"; + [GenerateConstant] + public const string Issue = "Issue"; + } + + /// + /// Constant strings used in Notifications + /// + public static class NotificationConstants + { + /// + /// Macro used in subscriptions which will be replaced by the project name when evaluated + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.MyProjectNameMacro in assembly MS.VS.Services.Notifications.WebApi")] + public const String MyProjectNameMacro = "@@MyProjectName@@"; + + /// + /// Macro used in subscriptions which will be replaced by the subscriber's Display Name when evaluated + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.MyDisplayNameMacro in assembly MS.VS.Services.Notifications.WebApi")] + public const String MyDisplayNameMacro = "@@MyDisplayName@@"; + + /// + /// Macro used in subscriptions which will be replaced by the subscriber's Unique User Name when evaluated + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.MyUniqueNameMacro 
in assembly MS.VS.Services.Notifications.WebApi")] + public const String MyUniqueNameMacro = "@@MyUniqueName@@"; + + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.SingleQuoteNameMacro in assembly MS.VS.Services.Notifications.WebApi")] + public const String SingleQuoteNameMacro = "@@SQBDQ@@"; //SingleQuoteBetweenDoubleQuotes + + [Obsolete] + public const String SingleQuoteValue = "\"'\""; //"'" + + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.DoubleQuoteNameMacro in assembly MS.VS.Services.Notifications.WebApi")] + public const String DoubleQuoteNameMacro = "@@DQBSQ@@"; //DoubleQuoteBetweenSingleQuotes + + [Obsolete] + public const String DoubleQuoteValue = "'\"'"; //'"' + + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.SingleQuoteCharMacro in assembly MS.VS.Services.Notifications.WebApi")] + public const String SingleQuoteCharMacro = "@@SingleQuote@@"; + + [Obsolete] + public const String SingleQuoteCharValue = "'"; + + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.DoubleQuoteCharMacro in assembly MS.VS.Services.Notifications.WebApi")] + public const String DoubleQuoteCharMacro = "@@DoubleQuote@@"; + + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.DoubleQuoteCharValue in assembly MS.VS.Services.Notifications.WebApi")] + public const String DoubleQuoteCharValue = "\""; + + /// + /// Token used in subscription addresses to identify dynamic delivery targets computed from the source event + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.DynamicTargetsToken in assembly MS.VS.Services.Notifications.WebApi")] + public const String DynamicTargetsToken = "@@"; + + /// + /// TeamFoundationIdentity property name for a user's custom list of Email addresses to receive notifications at + /// + public const String CustomNotificationAddressesIdentityProperty = "CustomNotificationAddresses"; + + /// + /// TeamFoundationIdentity propery name for a user's confirmed Email address to receive notifications. This is used in Hosted environments only. 
+ /// + public const string ConfirmedNotificationAddressIdentityProperty = "ConfirmedNotificationAddress"; + + /// + /// The name of the WorkItemChangedEvent + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.LegacyNames.WorkItemChangedEvent in assembly MS.VS.Services.Notifications.WebApi")] + public const string WorkItemChangedEventTypeName = "WorkItemChangedEvent"; + + /// + /// The name of the BuildStatusChangedEvent type + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.LegacyNames.BuildStatusChangeEvent in assembly MS.VS.Services.Notifications.WebApi")] + public const String BuildStatusChangeEventName = "BuildStatusChangeEvent"; + + /// + /// The name of the BuildCompletedEvent type + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.LegacyNames.BuildCompletedEvent in assembly MS.VS.Services.Notifications.WebApi")] + public const String BuildCompletedEventName = "BuildCompletedEvent"; + + /// + /// The name of the CheckinEvent type + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.LegacyNames.CheckinEvent in assembly MS.VS.Services.Notifications.WebApi")] + public const String CheckinEventName = "CheckinEvent"; + + /// + /// The name of the CodeReviewChangedEvent type + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.LegacyNames.CodeReviewChangedEvent in assembly MS.VS.Services.Notifications.WebApi")] + public const String CodeReviewChangedEventName = "CodeReviewChangedEvent"; + + /// + /// The name of the GitPushEvent type + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.LegacyNames.GitPushEvent in assembly MS.VS.Services.Notifications.WebApi")] + public const String GitPushEventName = "GitPushEvent"; + + /// + /// The name of the GitPullRequestEvent type + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.LegacyNames.GitPullRequestEvent in assembly MS.VS.Services.Notifications.WebApi")] + public const String GitPullRequestEventName = "GitPullRequestEvent"; + + /// + /// The relative path to the alerts admin web page + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationUrlConstants.AlertsPageRelativePath in assembly MS.VS.Services.Notifications.WebApi")] + public const String AlertsPageRelativePath = "{0}#id={1}&showteams={2}"; + + /// + /// The alerts page name + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationUrlConstants.AlertsPage in assembly MS.VS.Services.Notifications.WebApi")] + public const String AlertsPage = "_Alerts"; + + /// + /// The admin alerts page + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationUrlConstants.AlertsAdminPage in assembly MS.VS.Services.Notifications.WebApi")] + public const String AlertsAdminPage = "_admin/_Alerts"; + + /// + /// Property used to keep track of how many confirmations were sent for this user. Used to limit the number + /// of confirmations a single user is allowed to send out for their account. + /// The value is updated and monitored by the SendEmailConfirmationJob. 
+ /// + public const string EmailConfirmationSendDates = "EmailConfirmationSendDates"; + + /// + /// Prefix to denote that identity field value have been processed + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.ProcessedFlagCharacter in assembly MS.VS.Services.Notifications.WebApi")] + public const Char ProcessedFlagCharacter = (Char)7; + + /// + /// Prefix to denote that identity field value have been processed and converted to TFID + /// + /// [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.ProcessedTfIdFlagCharacter in assembly MS.VS.Services.Notifications.WebApi")] + public const Char ProcessedTfIdFlagCharacter = (Char)11; + + /// + /// Prefix to denote that this is the start of displayname value for this identity field + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.DisplayNameFlagCharacter in assembly MS.VS.Services.Notifications.WebApi")] + public const Char DisplayNameFlagCharacter = '|'; + + /// + /// Prefix to denote that this is the start of TFID value for this identity field + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.TfIdFlagCharacter in assembly MS.VS.Services.Notifications.WebApi")] + public const Char TfIdFlagCharacter = '%'; + + /// + /// Optional Feature flag to enable escaping Regex expressions when creating Notification subscriptions. + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.FeatureFlags.AllowUserRegexInMatchConditionFeatureFlag in assembly MS.VS.Services.Notifications.WebApi")] + public const string AllowUserRegexInMatchConditionFeatureFlag = "VisualStudio.Services.Notifications.AllowUserRegexInMatchCondition"; + + /// + /// The MDM scope name for the notification job + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.MDMConstants.MDMNotificationJobScope in assembly MS.VS.Services.Notifications.WebApi")] + public const string MDMNotificationJobScope = "NotificationJob"; + + /// + /// Event processing delay KPI name + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.MDMConstants.EventProcessingDelayKPI in assembly MS.VS.Services.Notifications.WebApi")] + public const string EventProcessingDelayKPI = "EventProcessingDelayInMs"; + + /// + /// Event processing delay KPI description + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.MDMConstants.EventProcessingDelayKPIDesc in assembly MS.VS.Services.Notifications.WebApi")] + public const string EventProcessingDelayKPIDesc = "Time taken to start processing an event"; + + /// + /// The MDM scope name for the delivery job + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.MDMConstants.MDMDeliveryJobscope in assembly MS.VS.Services.Notifications.WebApi")] + public const string MDMDeliveryJobscope = "NotificationDeliveryJob"; + + /// + /// Notification delivery delay KPI name + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.MDMConstants.DeliveryDelayKPI in assembly MS.VS.Services.Notifications.WebApi")] + public const string DeliveryDelayKPI = "NotificationDeliveryDelayInMs"; + + /// + /// Notification delivery delay with retries KPI name + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.MDMConstants.DeliveryDelayWithRetriesKPI in assembly MS.VS.Services.Notifications.WebApi")] + public const string DeliveryDelayWithRetriesKPI = "NotificationDeliveryDelayWithRetriesInMs"; + + /// + /// Total time taken between the event creation till 
the notification delivery + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.MDMConstants.TotalProcessingTimeKPI in assembly MS.VS.Services.Notifications.WebApi")] + public const string TotalProcessingTimeKPI = "EventProcessingTimeInMs"; + + /// + /// Total time taken between the event creation till the notification delivery + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.MDMConstants.TotalProcessingTimeWithRetriesKPI in assembly MS.VS.Services.Notifications.WebApi")] + public const string TotalProcessingTimeWithRetriesKPI = "EventProcessingTimeWithRetriesInMs"; + + /// + /// Notification delivery delay KPI description + /// + [Obsolete("Moved to GitHub.Services.Notifications.Common.MDMConstants.DeliveryDelayKPIDesc in assembly MS.VS.Services.Notifications.WebApi")] + public const string DeliveryDelayKPIDesc = "Time taken to start deliverying a notification"; + + // caching key for our notification bridge interface + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.BridgeKey in assembly MS.VS.Services.Notifications.WebApi")] + public const String BridgeKey = "@NotifBridge"; + + // delivery retry count registryKey + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.RetryCountRegistryKey in assembly MS.VS.Services.Notifications.WebApi")] + public const string RetryCountRegistryKey = "NotificationRetryCount"; + + // delivery retry count default value + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.RetryCountDefaultValue in assembly MS.VS.Services.Notifications.WebApi")] + public const Int32 RetryCountDefaultValue = 5; + + // the collection scope Guid + [Obsolete("Moved to GitHub.Services.Notifications.Common.NotificationFrameworkConstants.CollectionScope in assembly MS.VS.Services.Notifications.WebApi")] + public static Guid CollectionScope = new Guid("00000000-0000-636f-6c6c-656374696f6e"); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class LocationSecurityConstants + { + public static readonly Guid NamespaceId = new Guid("2725D2BC-7520-4AF4-B0E3-8D876494731F"); + public static readonly Char PathSeparator = '/'; + public static readonly string NamespaceRootToken = PathSeparator.ToString(); + public static readonly string ServiceDefinitionsToken = String.Concat(NamespaceRootToken, "ServiceDefinitions"); + public static readonly string AccessMappingsToken = String.Concat(NamespaceRootToken, "AccessMappings"); + + // Read for ServiceDefinitions and AccessMappings + public const Int32 Read = 1; + // Create/Update/Delete for ServiceDefinitions and AccessMappings + public const Int32 Write = 2; + public const Int32 AllPermissions = Read | Write; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class SecuritySecurityConstants + { + public static readonly Guid NamespaceId = new Guid("9A82C708-BFBE-4F31-984C-E860C2196781"); + public const char Separator = '/'; + public const String RootToken = ""; + + public const int Read = 1; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class GraphSecurityConstants + { + public static readonly Guid NamespaceId = new Guid("C2EE56C9-E8FA-4CDD-9D48-2C44F697A58E"); + public static readonly string RefsToken = "Refs"; + public static readonly string SubjectsToken = "Subjects"; + + public const int ReadByPublicIdentifier = 1; + public const int ReadByPersonalIdentifier = 2; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static class 
TeamProjectSecurityConstants + { + public static readonly Guid NamespaceId = new Guid("52D39943-CB85-4d7f-8FA8-C6BAAC873819"); + + // Existed in Orcas + public static readonly Int32 GenericRead = 1; + public static readonly Int32 GenericWrite = 2; + public static readonly Int32 Delete = 4; + public static readonly Int32 PublishTestResults = 8; + public static readonly Int32 AdministerBuild = 16; + public static readonly Int32 StartBuild = 32; + public static readonly Int32 EditBuildStatus = 64; + public static readonly Int32 UpdateBuild = 128; + public static readonly Int32 DeleteTestResults = 256; + public static readonly Int32 ViewTestResults = 512; + + // Dev10 Beta1 + public static readonly Int32 ManageTestEnvironments = 2048; + + // Dev10 Beta2 + public static readonly Int32 ManageTestConfigurations = 4096; + + // Dev14 Update 2 / VSO (M91) + public static readonly Int32 WorkItemDelete = 8192; + + // Dev14 Update 2 / VSO (M92) + public static readonly Int32 WorkItemMove = 16384; + + // Dev14 Update 2 / VSO (M94) + public static readonly Int32 WorkItemPermanentlyDelete = 32768; + + // Dev15 / VSO (M99) + public static readonly Int32 Rename = 65536; + + /// + /// The permission required for setting project properties. + /// Introduced in Dev15 Update 2 / VSO (M116). + /// + public static readonly Int32 ManageProperties = 131072; + + /// + /// The permission required for setting system project properties. + /// Introduced in Dev15 Update 2 / VSO (M116). + /// + /// + /// This permission was excluded from AllPermissions to avoid being unintentionally granted. + /// + public static readonly Int32 ManageSystemProperties = 262144; + + /// + /// The permission required for bypassing the project property cache. + /// Introduced in Dev16 / VSO (M118). + /// + /// + /// This permission was excluded from AllPermissions to avoid being unintentionally granted. + /// + public static readonly Int32 BypassPropertyCache = 524288; + + /// + /// The permission required for bypassing the rules while updating work items. + /// Introduced in Dev16 / VSO (M126). + /// + public static readonly Int32 BypassRules= 1048576; + + /// + /// The permission required for suppressing notifications for work item updates. + /// Introduced in Dev16 / VSO (M126). + /// + public static readonly Int32 SuppressNotifications= 2097152; + + /// + /// The permission required for updating project visibility. + /// Introduced in Dev16 / VSO (M131). + /// + public static readonly Int32 UpdateVisibility = 4194304; + + /// + /// The permission required for changing the process of the team project + /// Introduced in Dev17 / VSO (M136). + /// + public static readonly Int32 ChangeProjectsProcess = 8388608; + + /// + /// The permission required for granting access to backlog management. For stakeholder, this would disabled for private project and enabled for public project. + /// Introduced in Dev17 / VSO (M137). + /// + /// + /// This permission was excluded from AllPermissions to avoid being unintentionally granted. + /// + public static readonly Int32 AgileToolsBacklogManagement = 16777216; + + /// + /// The permission required for granting access to backlog management. For stakeholder, this is always disabled. + /// Introduced in Dev17 / VSO (M150). + /// + /// + /// This permission was excluded from AllPermissions to avoid being unintentionally granted. 
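Each field in TeamProjectSecurityConstants is a distinct bit, so permission masks combine and test with ordinary bitwise operators. The fragment below is a hedged sketch only (the granted mask is made up, and the namespace import for the constants is assumed); it is not how the service itself evaluates security.

    using System;
    // using <namespace that declares TeamProjectSecurityConstants>;  // assumed

    static class TeamProjectPermissionSketch
    {
        static void Main()
        {
            // Hypothetical mask for a contributor-like role: read, write, and queue builds.
            int granted = TeamProjectSecurityConstants.GenericRead
                        | TeamProjectSecurityConstants.GenericWrite
                        | TeamProjectSecurityConstants.StartBuild;

            // Testing a single bit tells us whether that permission is present.
            bool canWrite = (granted & TeamProjectSecurityConstants.GenericWrite) != 0;  // true
            bool canDelete = (granted & TeamProjectSecurityConstants.Delete) != 0;       // false

            Console.WriteLine($"canWrite={canWrite}, canDelete={canDelete}");
        }
    }

Note that the AllPermissions mask defined below deliberately leaves out bits such as ManageSystemProperties and AgileToolsPlans, so it is not simply every flag OR-ed together.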
+ /// + public static readonly Int32 AgileToolsPlans = 33554432; + + public static readonly Int32 AllPermissions = + GenericRead | + GenericWrite | + Delete | + PublishTestResults | + AdministerBuild | + StartBuild | + EditBuildStatus | + UpdateBuild | + DeleteTestResults | + ViewTestResults | + ManageTestEnvironments | + ManageTestConfigurations | + WorkItemDelete | + WorkItemMove | + WorkItemPermanentlyDelete | + Rename | + ManageProperties | + BypassRules | + SuppressNotifications | + UpdateVisibility | + ChangeProjectsProcess; + + public const String ProjectTokenPrefix = "$PROJECT:"; + + public static String GetToken(String projectUri) + { + if (String.IsNullOrEmpty(projectUri) || !projectUri.StartsWith(ProjectTokenPrefix, StringComparison.OrdinalIgnoreCase)) + { + if (projectUri == null) + { + projectUri = String.Empty; + } + + return ProjectTokenPrefix + projectUri + ":"; + } + + return projectUri + ":"; + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static class ContentValidationSecurityConstants + { + public static readonly Guid NamespaceId = new Guid("B1982126-CB90-4479-BDFD-CBF193241CB8"); + public static readonly string ViolationsToken = "Violations"; + + public const int Read = 1; + public const int Write = 2; + } + + public enum WinHttpErrorCode + { + WINHTTP_ERROR_BASE = 12000, + WINHTTP_ERROR_LAST = WINHTTP_ERROR_BASE + 188, + + ERROR_WINHTTP_OUT_OF_HANDLES = WINHTTP_ERROR_BASE + 1, + ERROR_WINHTTP_TIMEOUT = WINHTTP_ERROR_BASE + 2, + ERROR_WINHTTP_INTERNAL_ERROR = WINHTTP_ERROR_BASE + 4, + ERROR_WINHTTP_INVALID_URL = WINHTTP_ERROR_BASE + 5, + ERROR_WINHTTP_UNRECOGNIZED_SCHEME = WINHTTP_ERROR_BASE + 6, + ERROR_WINHTTP_NAME_NOT_RESOLVED = WINHTTP_ERROR_BASE + 7, + ERROR_WINHTTP_INVALID_OPTION = WINHTTP_ERROR_BASE + 9, + ERROR_WINHTTP_OPTION_NOT_SETTABLE = WINHTTP_ERROR_BASE + 11, + ERROR_WINHTTP_SHUTDOWN = WINHTTP_ERROR_BASE + 12, + ERROR_WINHTTP_LOGIN_FAILURE = WINHTTP_ERROR_BASE + 15, + ERROR_WINHTTP_OPERATION_CANCELLED = WINHTTP_ERROR_BASE + 17, + ERROR_WINHTTP_INCORRECT_HANDLE_TYPE = WINHTTP_ERROR_BASE + 18, + ERROR_WINHTTP_INCORRECT_HANDLE_STATE = WINHTTP_ERROR_BASE + 19, + ERROR_WINHTTP_CANNOT_CONNECT = WINHTTP_ERROR_BASE + 29, + ERROR_WINHTTP_CONNECTION_ERROR = WINHTTP_ERROR_BASE + 30, + ERROR_WINHTTP_RESEND_REQUEST = WINHTTP_ERROR_BASE + 32, + ERROR_WINHTTP_SECURE_CERT_DATE_INVALID = WINHTTP_ERROR_BASE + 37, + ERROR_WINHTTP_SECURE_CERT_CN_INVALID = WINHTTP_ERROR_BASE + 38, + ERROR_WINHTTP_CLIENT_AUTH_CERT_NEEDED = WINHTTP_ERROR_BASE + 44, + ERROR_WINHTTP_SECURE_INVALID_CA = WINHTTP_ERROR_BASE + 45, + ERROR_WINHTTP_SECURE_CERT_REV_FAILED = WINHTTP_ERROR_BASE + 57, + ERROR_WINHTTP_CANNOT_CALL_BEFORE_OPEN = WINHTTP_ERROR_BASE + 100, + ERROR_WINHTTP_CANNOT_CALL_BEFORE_SEND = WINHTTP_ERROR_BASE + 101, + ERROR_WINHTTP_CANNOT_CALL_AFTER_SEND = WINHTTP_ERROR_BASE + 102, + ERROR_WINHTTP_CANNOT_CALL_AFTER_OPEN = WINHTTP_ERROR_BASE + 103, + ERROR_WINHTTP_HEADER_NOT_FOUND = WINHTTP_ERROR_BASE + 150, + ERROR_WINHTTP_INVALID_SERVER_RESPONSE = WINHTTP_ERROR_BASE + 152, + ERROR_WINHTTP_INVALID_HEADER = WINHTTP_ERROR_BASE + 153, + ERROR_WINHTTP_INVALID_QUERY_REQUEST = WINHTTP_ERROR_BASE + 154, + ERROR_WINHTTP_HEADER_ALREADY_EXISTS = WINHTTP_ERROR_BASE + 155, + ERROR_WINHTTP_REDIRECT_FAILED = WINHTTP_ERROR_BASE + 156, + ERROR_WINHTTP_SECURE_CHANNEL_ERROR = WINHTTP_ERROR_BASE + 157, + ERROR_WINHTTP_BAD_AUTO_PROXY_SCRIPT = WINHTTP_ERROR_BASE + 166, + ERROR_WINHTTP_UNABLE_TO_DOWNLOAD_SCRIPT = WINHTTP_ERROR_BASE + 167, + ERROR_WINHTTP_SECURE_INVALID_CERT = 
WINHTTP_ERROR_BASE + 169, + ERROR_WINHTTP_SECURE_CERT_REVOKED = WINHTTP_ERROR_BASE + 170, + ERROR_WINHTTP_NOT_INITIALIZED = WINHTTP_ERROR_BASE + 172, + ERROR_WINHTTP_SECURE_FAILURE = WINHTTP_ERROR_BASE + 175, + ERROR_WINHTTP_UNHANDLED_SCRIPT_TYPE = WINHTTP_ERROR_BASE + 176, + ERROR_WINHTTP_SCRIPT_EXECUTION_ERROR = WINHTTP_ERROR_BASE + 177, + ERROR_WINHTTP_AUTO_PROXY_SERVICE_ERROR = WINHTTP_ERROR_BASE + 178, + ERROR_WINHTTP_SECURE_CERT_WRONG_USAGE = WINHTTP_ERROR_BASE + 179, + ERROR_WINHTTP_AUTODETECTION_FAILED = WINHTTP_ERROR_BASE + 180, + ERROR_WINHTTP_HEADER_COUNT_EXCEEDED = WINHTTP_ERROR_BASE + 181, + ERROR_WINHTTP_HEADER_SIZE_OVERFLOW = WINHTTP_ERROR_BASE + 182, + ERROR_WINHTTP_CHUNKED_ENCODING_HEADER_SIZE_OVERFLOW = WINHTTP_ERROR_BASE + 183, + ERROR_WINHTTP_RESPONSE_DRAIN_OVERFLOW = WINHTTP_ERROR_BASE + 184, + ERROR_WINHTTP_CLIENT_CERT_NO_PRIVATE_KEY = WINHTTP_ERROR_BASE + 185, + ERROR_WINHTTP_CLIENT_CERT_NO_ACCESS_PRIVATE_KEY = WINHTTP_ERROR_BASE + 186, + ERROR_WINHTTP_CLIENT_AUTH_CERT_NEEDED_PROXY = WINHTTP_ERROR_BASE + 187, + ERROR_WINHTTP_SECURE_FAILURE_PROXY = WINHTTP_ERROR_BASE + 188 + } + + public enum CurlErrorCode + { + CURLE_OK = 0, + CURLE_UNSUPPORTED_PROTOCOL = 1, + CURLE_FAILED_INIT = 2, + CURLE_URL_MALFORMAT = 3, + CURLE_NOT_BUILT_IN = 4, + CURLE_COULDNT_RESOLVE_PROXY = 5, + CURLE_COULDNT_RESOLVE_HOST = 6, + CURLE_COULDNT_CONNECT = 7, + CURLE_FTP_WEIRD_SERVER_REPLY = 8, + CURLE_REMOTE_ACCESS_DENIED = 9, + CURLE_FTP_ACCEPT_FAILED = 10, + CURLE_FTP_WEIRD_PASS_REPLY = 11, + CURLE_FTP_ACCEPT_TIMEOUT = 12, + CURLE_FTP_WEIRD_PASV_REPLY = 13, + CURLE_FTP_WEIRD_227_FORMAT = 14, + CURLE_FTP_CANT_GET_HOST = 15, + CURLE_HTTP2 = 16, + CURLE_FTP_COULDNT_SET_TYPE = 17, + CURLE_PARTIAL_FILE = 18, + CURLE_FTP_COULDNT_RETR_FILE = 19, + CURLE_QUOTE_ERROR = 21, + CURLE_HTTP_RETURNED_ERROR = 22, + CURLE_WRITE_ERROR = 23, + CURLE_UPLOAD_FAILED = 25, + CURLE_READ_ERROR = 26, + CURLE_OUT_OF_MEMORY = 27, + CURLE_OPERATION_TIMEDOUT = 28, + CURLE_FTP_PORT_FAILED = 30, + CURLE_FTP_COULDNT_USE_REST = 31, + CURLE_RANGE_ERROR = 33, + CURLE_HTTP_POST_ERROR = 34, + CURLE_SSL_CONNECT_ERROR = 35, + CURLE_BAD_DOWNLOAD_RESUME = 36, + CURLE_FILE_COULDNT_READ_FILE = 37, + CURLE_LDAP_CANNOT_BIND = 38, + CURLE_LDAP_SEARCH_FAILED = 39, + CURLE_FUNCTION_NOT_FOUND = 41, + CURLE_ABORTED_BY_CALLBACK = 42, + CURLE_BAD_FUNCTION_ARGUMENT = 43, + CURLE_INTERFACE_FAILED = 45, + CURLE_TOO_MANY_REDIRECTS = 47, + CURLE_UNKNOWN_OPTION = 48, + CURLE_TELNET_OPTION_SYNTAX = 49, + CURLE_PEER_FAILED_VERIFICATION = 51, + CURLE_GOT_NOTHING = 52, + CURLE_SSL_ENGINE_NOTFOUND = 53, + CURLE_SSL_ENGINE_SETFAILED = 54, + CURLE_SEND_ERROR = 55, + CURLE_RECV_ERROR = 56, + CURLE_SSL_CERTPROBLEM = 58, + CURLE_SSL_CIPHER = 59, + CURLE_SSL_CACERT = 60, + CURLE_BAD_CONTENT_ENCODING = 61, + CURLE_LDAP_INVALID_URL = 62, + CURLE_FILESIZE_EXCEEDED = 63, + CURLE_USE_SSL_FAILED = 64, + CURLE_SEND_FAIL_REWIND = 65, + CURLE_SSL_ENGINE_INITFAILED = 66, + CURLE_LOGIN_DENIED = 67, + CURLE_TFTP_NOTFOUND = 68, + CURLE_TFTP_PERM = 69, + CURLE_REMOTE_DISK_FULL = 70, + CURLE_TFTP_ILLEGAL = 71, + CURLE_TFTP_UNKNOWNID = 72, + CURLE_REMOTE_FILE_EXISTS = 73, + CURLE_TFTP_NOSUCHUSER = 74, + CURLE_CONV_FAILED = 75, + CURLE_CONV_REQD = 76, + CURLE_SSL_CACERT_BADFILE = 77, + CURLE_REMOTE_FILE_NOT_FOUND = 78, + CURLE_SSH = 79, + CURLE_SSL_SHUTDOWN_FAILED = 80, + CURLE_AGAIN = 81, + CURLE_SSL_CRL_BADFILE = 82, + CURLE_SSL_ISSUER_ERROR = 83, + CURLE_FTP_PRET_FAILED = 84, + CURLE_RTSP_CSEQ_ERROR = 85, + CURLE_RTSP_SESSION_ERROR = 86, + CURLE_FTP_BAD_FILE_LIST = 87, + 
CURLE_CHUNK_FAILED = 88, + CURLE_NO_CONNECTION_AVAILABLE = 89, + CURLE_SSL_PINNEDPUBKEYNOTMATCH = 90, + CURLE_SSL_INVALIDCERTSTATUS = 91, + CURLE_HTTP2_STREAM = 92, + CURLE_RECURSIVE_API_CALL = 93 + } +} diff --git a/src/Sdk/Common/Common/VssException.cs b/src/Sdk/Common/Common/VssException.cs new file mode 100644 index 00000000000..c711c0ed4c2 --- /dev/null +++ b/src/Sdk/Common/Common/VssException.cs @@ -0,0 +1,291 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Reflection; +using System.Runtime.Serialization; +using System.Security; + +namespace GitHub.Services.Common +{ + /// + /// Base class for all custom exceptions thrown from Vss and Tfs code. + /// + /// + /// All Exceptions in the VSS space -- any exception that flows across + /// a REST API boudary -- should derive from VssServiceException. This is likely + /// almost ALL new exceptions. Legacy TFS exceptions that do not flow through rest + /// derive from TeamFoundationServerException or TeamFoundationServiceException + /// + [Serializable] + [SuppressMessage("Microsoft.Usage", "CA2240:ImplementISerializableCorrectly")] + [ExceptionMapping("0.0", "3.0", "VssException", "GitHub.Services.Common.VssException, GitHub.Services.Common, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public abstract class VssException : ApplicationException + { + /// + /// No-arg constructor that sumply defers to the base class. + /// + public VssException() : base() + { + } + + /// + /// Initializes an exception with the specified error message. + /// + /// Application-defined error code for this exception + public VssException(int errorCode) : this(errorCode, false) + { + } + + /// + /// Initializes an exception with the specified error message. + /// + /// Application-defined error code for this exception + /// Indicate whether this exception should be logged + public VssException(int errorCode, bool logException) + { + ErrorCode = errorCode; + LogException = logException; + } + + /// + /// Initializes an exception with the specified error message. + /// + /// A human readable message that describes the error + public VssException(string message) : base(SecretUtility.ScrubSecrets(message)) + { + } + + /// + /// Initializes an exception with the specified error message and an inner exception that caused this exception to be raised. + /// + /// A human readable message that describes the error + /// + public VssException(string message, Exception innerException) : base(SecretUtility.ScrubSecrets(message), innerException) + { + } + + /// + /// Initializes an exception with the specified error message and an inner exception that caused this exception to be raised. + /// + /// A human readable message that describes the error + /// Application defined error code + /// + public VssException(string message, int errorCode, Exception innerException) : this(message, innerException) + { + ErrorCode = errorCode; + LogException = false; + } + + /// + /// Initializes an exception with the specified error message and an inner exception that caused this exception to be raised. 
+ /// + /// A human readable message that describes the error + /// Application defined error code + public VssException(string message, int errorCode) : this(message, errorCode, false) + { + } + + /// + /// Initializes an exception with the specified error message and an inner exception that caused this exception to be raised. + /// + /// A human readable message that describes the error + /// Application defined error code + /// Indicate whether this exception should be logged + public VssException(string message, int errorCode, bool logException) : this(message) + { + ErrorCode = errorCode; + LogException = logException; + } + + /// + /// Initializes an exception with the specified error message and an inner exception that caused this exception to be raised. + /// + /// A human readable message that describes the error + /// Application defined error code + /// + /// + public VssException(string message, int errorCode, bool logException, Exception innerException) : this(message, innerException) + { + ErrorCode = errorCode; + LogException = logException; + } + + /// + /// Initializes an exception from serialized data + /// + /// object holding the serialized data + /// context info about the source or destination + protected VssException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + LogException = (bool)info.GetValue("m_logException", typeof(bool)); + ReportException = (bool)info.GetValue("m_reportException", typeof(bool)); + ErrorCode = (int)info.GetValue("m_errorCode", typeof(int)); +#if !NETSTANDARD + LogLevel = (EventLogEntryType)info.GetValue("m_logLevel", typeof(EventLogEntryType)); +#endif + EventId = (int)info.GetValue("m_eventId", typeof(int)); + } + + [SecurityCritical] + public override void GetObjectData(SerializationInfo info, StreamingContext context) + { + base.GetObjectData(info, context); + info.AddValue("m_logException", LogException); + info.AddValue("m_reportException", ReportException); + info.AddValue("m_errorCode", ErrorCode); +#if !NETSTANDARD + info.AddValue("m_logLevel", LogLevel); +#endif + info.AddValue("m_eventId", EventId); + } + + /// Indicate whether this exception instance should be logged + /// True (false) if the exception should (should not) be logged + public bool LogException + { + get + { + return m_logException; + } + set + { + m_logException = value; + } + } + +#if !NETSTANDARD + /// The event log entry type to use when logging the exception + /// One of the event log entry types: + public EventLogEntryType LogLevel + { + get + { + return m_logLevel; + } + set + { + m_logLevel = value; + } + } +#endif + + /// A user-defined error code. + public int ErrorCode + { + get + { + return m_errorCode; + } + set + { + m_errorCode = value; + } + } + + /// The event ID to report if the exception is marked for the event log + /// The event ID used in the entry added to the event log + public int EventId + { + get + { + return m_eventId; + } + set + { + m_eventId = value; + } + } + + /// Indicate whether the exception should be reported through Dr. Watson + /// True (false) if the exception should (should not) be reported + public bool ReportException + { + get + { + return m_reportException; + } + set + { + m_reportException = value; + } + } + + /// + /// Gets the default serialized type name and type key for the given exception type. 
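VssException is abstract, so concrete exception types derive from it (or, per the remarks above, from VssServiceException for anything that crosses a REST boundary) and funnel a message plus an application error code through the constructors shown here. Below is a minimal hypothetical derivation, purely to illustrate the constructor chaining; the type name and error code are invented.

    using System;
    using System.Runtime.Serialization;
    // using GitHub.Services.Common;  // namespace of VssException, per the file header above

    [Serializable]
    public class WidgetServiceException : VssException
    {
        private const int WidgetErrorCode = 4242;  // made-up application error code

        public WidgetServiceException(string message)
            : base(message, WidgetErrorCode, true)  // true => log this exception
        {
        }

        public WidgetServiceException(string message, Exception innerException)
            : base(message, WidgetErrorCode, true, innerException)
        {
        }

        // Supports the serialization path handled by the protected base constructor above.
        protected WidgetServiceException(SerializationInfo info, StreamingContext context)
            : base(info, context)
        {
        }
    }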
+ /// + internal static void GetTypeNameAndKeyForExceptionType(Type exceptionType, Version restApiVersion, out String typeName, out String typeKey) + { + typeName = null; + typeKey = exceptionType.Name; + if (restApiVersion != null) + { + IEnumerable exceptionAttributes = exceptionType.GetTypeInfo().GetCustomAttributes().Where(ea => ea.MinApiVersion <= restApiVersion && ea.ExclusiveMaxApiVersion > restApiVersion); + if (exceptionAttributes.Any()) + { + ExceptionMappingAttribute exceptionAttribute = exceptionAttributes.First(); + typeName = exceptionAttribute.TypeName; + typeKey = exceptionAttribute.TypeKey; + } + else if (restApiVersion < s_backCompatExclusiveMaxVersion) //if restApiVersion < 3 we send the assembly qualified name with the current binary version switched out to 14 + { + typeName = GetBackCompatAssemblyQualifiedName(exceptionType); + } + } + + if (typeName == null) + { + + AssemblyName asmName = exceptionType.GetTypeInfo().Assembly.GetName(); + if (asmName != null) + { + //going forward we send "FullName" and simple assembly name which includes no version. + typeName = exceptionType.FullName + ", " + asmName.Name; + } + else + { + String assemblyString = exceptionType.GetTypeInfo().Assembly.FullName; + assemblyString = assemblyString.Substring(0, assemblyString.IndexOf(',')); + typeName = exceptionType.FullName + ", " + assemblyString; + } + } + } + + internal static String GetBackCompatAssemblyQualifiedName(Type type) + { + AssemblyName current = type.GetTypeInfo().Assembly.GetName(); + if (current != null) + { + AssemblyName old = current; + old.Version = new Version(c_backCompatVersion, 0, 0, 0); + return Assembly.CreateQualifiedName(old.ToString(), type.FullName); + } + else + { + //this is probably not necessary... + return type.AssemblyQualifiedName.Replace(c_currentAssemblyMajorVersionString, c_backCompatVersionString); + } + } + + private const String c_currentAssemblyMajorVersionString = "Version=" + GeneratedVersionInfo.AssemblyMajorVersion; + private const String c_backCompatVersionString = "Version=14"; + private const int c_backCompatVersion = 14; + + private static Version s_backCompatExclusiveMaxVersion = new Version(3, 0); + private bool m_logException; + private bool m_reportException; + private int m_errorCode; + +#if !NETSTANDARD + private EventLogEntryType m_logLevel = EventLogEntryType.Warning; +#endif + + private int m_eventId = DefaultExceptionEventId; + + //From EventLog.cs in Framework. + public const int DefaultExceptionEventId = 3000; + } +} diff --git a/src/Sdk/Common/Common/VssHttpMessageHandler.cs b/src/Sdk/Common/Common/VssHttpMessageHandler.cs new file mode 100644 index 00000000000..f51c3553b49 --- /dev/null +++ b/src/Sdk/Common/Common/VssHttpMessageHandler.cs @@ -0,0 +1,682 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common.Diagnostics; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.Common +{ + /// + /// Provides authentication for Visual Studio Services. + /// + public class VssHttpMessageHandler : HttpMessageHandler + { + /// + /// Initializes a new VssHttpMessageHandler instance with default credentials and request + /// settings. + /// + public VssHttpMessageHandler() + : this(new VssCredentials(), new VssHttpRequestSettings()) + { + } + + /// + /// Initializes a new VssHttpMessageHandler instance with the specified credentials and request + /// settings. 
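A consumer typically builds this handler from a VssCredentials and a VssHttpRequestSettings instance and hands it to an HttpClient, letting the handler run the authentication handshake implemented in SendAsync below. The sketch is hedged: the endpoint URL is invented, and it relies only on the parameterless constructors that the default handler constructor above already uses.

    using System;
    using System.Net.Http;
    using System.Threading.Tasks;
    using GitHub.Services.Common;

    static class HandlerUsageSketch
    {
        static async Task Main()
        {
            // Default credentials and request settings, mirroring the parameterless
            // VssHttpMessageHandler constructor shown above.
            var credentials = new VssCredentials();
            var settings = new VssHttpRequestSettings();

            var handler = new VssHttpMessageHandler(credentials, settings);

            // HttpClient is only the transport facade; it disposes the handler for us.
            using var client = new HttpClient(handler);

            // Hypothetical endpoint; any URL behind VSS-style authentication would do.
            HttpResponseMessage response = await client.GetAsync("https://example.com/_apis/projects");
            Console.WriteLine(response.StatusCode);
        }
    }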
+ /// + /// The credentials which should be used + /// The request settings which should be used + public VssHttpMessageHandler( + VssCredentials credentials, + VssHttpRequestSettings settings) + : this(credentials, settings, +#if !NETSTANDARD + new WebRequestHandler() +#else + new HttpClientHandler() +#endif + ) + { + } + + /// + /// Initializes a new VssHttpMessageHandler instance with the specified credentials and request + /// settings. + /// + /// The credentials which should be used + /// The request settings which should be used + /// + public VssHttpMessageHandler( + VssCredentials credentials, + VssHttpRequestSettings settings, + HttpMessageHandler innerHandler) + { + this.Credentials = credentials; + this.Settings = settings; + this.ExpectContinue = settings.ExpectContinue; + + m_credentialWrapper = new CredentialWrapper(); + m_messageInvoker = new HttpMessageInvoker(innerHandler); + + // If we were given a pipeline make sure we find the inner-most handler to apply our settings as this + // will be the actual outgoing transport. + { + HttpMessageHandler transportHandler = innerHandler; + DelegatingHandler delegatingHandler = transportHandler as DelegatingHandler; + while (delegatingHandler != null) + { + transportHandler = delegatingHandler.InnerHandler; + delegatingHandler = transportHandler as DelegatingHandler; + } + + m_transportHandler = transportHandler; + } + +#if NETSTANDARD + //.Net Core does not recognize CredentialCache.DefaultCredentials if we wrap them with CredentialWrapper + bool isDefaultCredentials = credentials != null && credentials.Windows != null && credentials.Windows.UseDefaultCredentials; + ApplySettings(m_transportHandler, isDefaultCredentials ? CredentialCache.DefaultCredentials : m_credentialWrapper, this.Settings); +#else + ApplySettings(m_transportHandler, m_credentialWrapper, this.Settings); +#endif + } + + /// + /// Gets the credentials associated with this handler. + /// + public VssCredentials Credentials + { + get; + private set; + } + + /// + /// Gets the settings associated with this handler. + /// + public VssHttpRequestSettings Settings + { + get; + private set; + } + + private Boolean ExpectContinue + { + get; + set; + } + + protected override void Dispose(Boolean disposing) + { + base.Dispose(disposing); + + if (disposing) + { + if (m_messageInvoker != null) + { + m_messageInvoker.Dispose(); + } + } + } + + internal static readonly String PropertyName = "MS.VS.MessageHandler"; + + /// + /// Handles the authentication hand-shake for a Visual Studio service. + /// + /// The HTTP request message + /// The cancellation token used for cooperative cancellation + /// A new Task<HttpResponseMessage> which wraps the response from the remote service + protected override async Task SendAsync( + HttpRequestMessage request, + CancellationToken cancellationToken) + { + VssTraceActivity traceActivity = VssTraceActivity.Current; + + var traceInfo = VssHttpMessageHandlerTraceInfo.GetTraceInfo(request); + traceInfo?.TraceHandlerStartTime(); + +#if !NETSTANDARD + // This action is deferred from ApplySettings because we want don't want to do it if we aren't + // talking to an HTTPS endpoint. 
+ if (!m_appliedClientCertificatesToTransportHandler && + request.RequestUri.Scheme == "https") + { + WebRequestHandler webRequestHandler = m_transportHandler as WebRequestHandler; + if (webRequestHandler != null && + this.Settings.ClientCertificateManager != null && + this.Settings.ClientCertificateManager.ClientCertificates != null && + this.Settings.ClientCertificateManager.ClientCertificates.Count > 0) + { + webRequestHandler.ClientCertificates.AddRange(this.Settings.ClientCertificateManager.ClientCertificates); + } + m_appliedClientCertificatesToTransportHandler = true; + } + + if (!m_appliedServerCertificateValidationCallbackToTransportHandler && + request.RequestUri.Scheme == "https") + { + WebRequestHandler webRequestHandler = m_transportHandler as WebRequestHandler; + if (webRequestHandler != null && + this.Settings.ServerCertificateValidationCallback != null) + { + webRequestHandler.ServerCertificateValidationCallback = this.Settings.ServerCertificateValidationCallback; + } + m_appliedServerCertificateValidationCallbackToTransportHandler = true; + } +#else + if (!m_appliedClientCertificatesToTransportHandler && + request.RequestUri.Scheme == "https") + { + HttpClientHandler httpClientHandler = m_transportHandler as HttpClientHandler; + if (httpClientHandler != null && + this.Settings.ClientCertificateManager != null && + this.Settings.ClientCertificateManager.ClientCertificates != null && + this.Settings.ClientCertificateManager.ClientCertificates.Count > 0) + { + httpClientHandler.ClientCertificates.AddRange(this.Settings.ClientCertificateManager.ClientCertificates); + } + m_appliedClientCertificatesToTransportHandler = true; + } + + if (!m_appliedServerCertificateValidationCallbackToTransportHandler && + request.RequestUri.Scheme == "https") + { + HttpClientHandler httpClientHandler = m_transportHandler as HttpClientHandler; + if (httpClientHandler != null && + this.Settings.ServerCertificateValidationCallback != null) + { + httpClientHandler.ServerCertificateCustomValidationCallback = this.Settings.ServerCertificateValidationCallback; + } + m_appliedServerCertificateValidationCallbackToTransportHandler = true; + } + + // The .NET Core 2.1 runtime switched its HTTP default from HTTP 1.1 to HTTP 2. + // This causes problems with some versions of the Curl handler on Linux. + // See GitHub issue https://github.com/dotnet/corefx/issues/32376 + if (Settings.UseHttp11) + { + request.Version = HttpVersion.Version11; + } +#endif + + IssuedToken token = null; + IssuedTokenProvider provider; + if (this.Credentials.TryGetTokenProvider(request.RequestUri, out provider)) + { + token = provider.CurrentToken; + } + + // Add ourselves to the message so the underlying token issuers may use it if necessary + request.Properties[VssHttpMessageHandler.PropertyName] = this; + + Boolean succeeded = false; + Boolean lastResponseDemandedProxyAuth = false; + Int32 retries = m_maxAuthRetries; + HttpResponseMessage response = null; + HttpResponseMessageWrapper responseWrapper; + CancellationTokenSource tokenSource = null; + + try + { + tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); + + if (this.Settings.SendTimeout > TimeSpan.Zero) + { + tokenSource.CancelAfter(this.Settings.SendTimeout); + } + + do + { + if (response != null) + { + response.Dispose(); + } + + ApplyHeaders(request); + + // In the case of a Windows token, only apply it to the web proxy if it + // returned a 407 Proxy Authentication Required. 
If we didn't get this + // status code back, then the proxy (if there is one) is clearly working fine, + // so we shouldn't mess with its credentials. + ApplyToken(request, token, applyICredentialsToWebProxy: lastResponseDemandedProxyAuth); + lastResponseDemandedProxyAuth = false; + + // The WinHttpHandler will chunk any content that does not have a computed length which is + // not what we want. By loading into a buffer up-front we bypass this behavior and there is + // no difference in the normal HttpClientHandler behavior here since this is what they were + // already doing. + await BufferRequestContentAsync(request, tokenSource.Token).ConfigureAwait(false); + + traceInfo?.TraceBufferedRequestTime(); + + // ConfigureAwait(false) enables the continuation to be run outside any captured + // SyncronizationContext (such as ASP.NET's) which keeps things from deadlocking... + response = await m_messageInvoker.SendAsync(request, tokenSource.Token).ConfigureAwait(false); + + traceInfo?.TraceRequestSendTime(); + + // Now buffer the response content if configured to do so. In general we will be buffering + // the response content in this location, except in the few cases where the caller has + // specified HttpCompletionOption.ResponseHeadersRead. + // Trace content type in case of error + await BufferResponseContentAsync(request, response, () => $"[ContentType: {response.Content.GetType().Name}]", tokenSource.Token).ConfigureAwait(false); + + traceInfo?.TraceResponseContentTime(); + + responseWrapper = new HttpResponseMessageWrapper(response); + + if (!this.Credentials.IsAuthenticationChallenge(responseWrapper)) + { + // Validate the token after it has been successfully authenticated with the server. + if (provider != null) + { + provider.ValidateToken(token, responseWrapper); + } + + // Make sure that once we can authenticate with the service that we turn off the + // Expect100Continue behavior to increase performance. + this.ExpectContinue = false; + succeeded = true; + break; + } + else + { + // In the case of a Windows token, only apply it to the web proxy if it + // returned a 407 Proxy Authentication Required. If we didn't get this + // status code back, then the proxy (if there is one) is clearly working fine, + // so we shouldn't mess with its credentials. + lastResponseDemandedProxyAuth = responseWrapper.StatusCode == HttpStatusCode.ProxyAuthenticationRequired; + + // Invalidate the token and ensure that we have the correct token provider for the challenge + // which we just received + VssHttpEventSource.Log.AuthenticationFailed(traceActivity, response); + + if (provider != null) + { + provider.InvalidateToken(token); + } + + // Ensure we have an appropriate token provider for the current challenge + provider = this.Credentials.CreateTokenProvider(request.RequestUri, responseWrapper, token); + + // Make sure we don't invoke the provider in an invalid state + if (provider == null) + { + VssHttpEventSource.Log.IssuedTokenProviderNotFound(traceActivity); + break; + } + else if (provider.GetTokenIsInteractive && this.Credentials.PromptType == CredentialPromptType.DoNotPrompt) + { + VssHttpEventSource.Log.IssuedTokenProviderPromptRequired(traceActivity, provider); + break; + } + + // If the user has already tried once but still unauthorized, stop retrying. The main scenario for this condition + // is a user typed in a valid credentials for a hosted account but the associated identity does not have + // access. 
We do not want to continually prompt 3 times without telling them the failure reason. In the + // next release we should rethink about presenting user the failure and options between retries. + IEnumerable headerValues; + Boolean hasAuthenticateError = + response.Headers.TryGetValues(HttpHeaders.VssAuthenticateError, out headerValues) && + !String.IsNullOrEmpty(headerValues.FirstOrDefault()); + + if (retries == 0 || (retries < m_maxAuthRetries && hasAuthenticateError)) + { + break; + } + + // Now invoke the provider and await the result + token = await provider.GetTokenAsync(token, tokenSource.Token).ConfigureAwait(false); + + // I always see 0 here, but the method above could take more time so keep for now + traceInfo?.TraceGetTokenTime(); + + // If we just received a token, lets ask the server for the VSID + request.Headers.Add(HttpHeaders.VssUserData, String.Empty); + + retries--; + } + } + while (retries >= 0); + + if (traceInfo != null) + { + traceInfo.TokenRetries = m_maxAuthRetries - retries; + } + + // We're out of retries and the response was an auth challenge -- then the request was unauthorized + // and we will throw a strongly-typed exception with a friendly error message. + if (!succeeded && response != null && this.Credentials.IsAuthenticationChallenge(responseWrapper)) + { + String message = null; + IEnumerable serviceError; + + if (response.Headers.TryGetValues(HttpHeaders.TfsServiceError, out serviceError)) + { + message = UriUtility.UrlDecode(serviceError.FirstOrDefault()); + } + else + { + message = CommonResources.VssUnauthorized(request.RequestUri.GetLeftPart(UriPartial.Authority)); + } + + // Make sure we do not leak the response object when raising an exception + if (response != null) + { + response.Dispose(); + } + + VssHttpEventSource.Log.HttpRequestUnauthorized(traceActivity, request, message); + VssUnauthorizedException unauthorizedException = new VssUnauthorizedException(message); + + if (provider != null) + { + unauthorizedException.Data.Add(CredentialsType, provider.CredentialType); + } + + throw unauthorizedException; + } + + return response; + } + catch (OperationCanceledException ex) + { + if (cancellationToken.IsCancellationRequested) + { + VssHttpEventSource.Log.HttpRequestCancelled(traceActivity, request); + throw; + } + else + { + VssHttpEventSource.Log.HttpRequestTimedOut(traceActivity, request, this.Settings.SendTimeout); + throw new TimeoutException(CommonResources.HttpRequestTimeout(this.Settings.SendTimeout), ex); + } + } + finally + { + // We always dispose of the token source since otherwise we leak resources if there is a timer pending + if (tokenSource != null) + { + tokenSource.Dispose(); + } + + traceInfo?.TraceTrailingTime(); + } + } + + private static async Task BufferRequestContentAsync( + HttpRequestMessage request, + CancellationToken cancellationToken) + { + if (request.Content != null && + request.Headers.TransferEncodingChunked != true) + { + Int64? 
contentLength = request.Content.Headers.ContentLength; + if (contentLength == null) + { + await request.Content.LoadIntoBufferAsync().EnforceCancellation(cancellationToken).ConfigureAwait(false); + } + + // Explicitly turn off chunked encoding since we have computed the request content size + request.Headers.TransferEncodingChunked = false; + } + } + + protected virtual async Task BufferResponseContentAsync( + HttpRequestMessage request, + HttpResponseMessage response, + Func makeErrorMessage, + CancellationToken cancellationToken) + { + // Determine whether or not we should go ahead and buffer the output under our timeout scope. If + // we do not perform this action here there is a potential network stack hang since we override + // the HttpClient.SendTimeout value and the cancellation token for monitoring request timeout does + // not survive beyond this scope. + if (response == null || response.StatusCode == HttpStatusCode.NoContent || response.Content == null) + { + return; + } + + // Do not try to buffer with a size of 0. This forces all calls to effectively use the behavior of + // HttpCompletionOption.ResponseHeadersRead if that is desired. + if (this.Settings.MaxContentBufferSize == 0) + { + return; + } + + // Read the completion option provided by the caller. If we don't find the property then we + // assume it is OK to buffer by default. + HttpCompletionOption completionOption; + if (!request.Properties.TryGetValue(VssHttpRequestSettings.HttpCompletionOptionPropertyName, out completionOption)) + { + completionOption = HttpCompletionOption.ResponseContentRead; + } + + // If the caller specified that response content should be read then we need to go ahead and + // buffer it all up to the maximum buffer size specified by the settings. Anything larger than + // the maximum will trigger an error in the underlying stack. 
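As an aside to the buffering decision made just below: a caller that prefers to stream a large response can attach HttpCompletionOption.ResponseHeadersRead to the request before sending it, which is exactly the property this method looks up. A minimal, hypothetical fragment (the URL is invented, and it assumes HttpCompletionOptionPropertyName is accessible to the caller):

    using System.Net.Http;

    var request = new HttpRequestMessage(HttpMethod.Get, "https://example.com/large-artifact");

    // The handler reads this property and, for ResponseHeadersRead, skips buffering the body.
    request.Properties[VssHttpRequestSettings.HttpCompletionOptionPropertyName] =
        HttpCompletionOption.ResponseHeadersRead;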
+ if (completionOption == HttpCompletionOption.ResponseContentRead) + { + await response.Content.LoadIntoBufferAsync(this.Settings.MaxContentBufferSize).EnforceCancellation(cancellationToken, makeErrorMessage).ConfigureAwait(false); + } + } + + private void ApplyHeaders(HttpRequestMessage request) + { + if (this.Settings.ApplyTo(request)) + { + VssTraceActivity activity = request.GetActivity(); + if (activity != null && + activity != VssTraceActivity.Empty && + !request.Headers.Contains(HttpHeaders.TfsSessionHeader)) + { + request.Headers.Add(HttpHeaders.TfsSessionHeader, activity.Id.ToString("D")); + } + + request.Headers.ExpectContinue = this.ExpectContinue; + } + } + + private void ApplyToken( + HttpRequestMessage request, + IssuedToken token, + bool applyICredentialsToWebProxy = false) + { + if (token == null) + { + return; + } + + ICredentials credentialsToken = token as ICredentials; + if (credentialsToken != null) + { + if (applyICredentialsToWebProxy) + { + HttpClientHandler httpClientHandler = m_transportHandler as HttpClientHandler; + + if (httpClientHandler != null && + httpClientHandler.Proxy != null) + { + httpClientHandler.Proxy.Credentials = credentialsToken; + } + } + + m_credentialWrapper.InnerCredentials = credentialsToken; + } + else + { + token.ApplyTo(new HttpRequestMessageWrapper(request)); + } + } + + private static void ApplySettings( + HttpMessageHandler handler, + ICredentials defaultCredentials, + VssHttpRequestSettings settings) + { + HttpClientHandler httpClientHandler = handler as HttpClientHandler; + if (httpClientHandler != null) + { + httpClientHandler.AllowAutoRedirect = settings.AllowAutoRedirect; + httpClientHandler.ClientCertificateOptions = ClientCertificateOption.Manual; + //Setting httpClientHandler.UseDefaultCredentials to false in .Net Core, clears httpClientHandler.Credentials if + //credentials is already set to defaultcredentials. Therefore httpClientHandler.Credentials must be + //set after httpClientHandler.UseDefaultCredentials. + httpClientHandler.UseDefaultCredentials = false; + httpClientHandler.Credentials = defaultCredentials; + httpClientHandler.PreAuthenticate = false; + httpClientHandler.Proxy = DefaultWebProxy; + httpClientHandler.UseCookies = false; + httpClientHandler.UseProxy = true; + + if (settings.CompressionEnabled) + { + httpClientHandler.AutomaticDecompression = DecompressionMethods.GZip; + } + } + } + + private static IWebProxy s_defaultWebProxy = +#if !NETSTANDARD + WebRequest.DefaultWebProxy; +#else + // setting this to WebRequest.DefaultWebProxy in NETSTANDARD is causing a System.PlatformNotSupportedException + //.in System.Net.SystemWebProxy.IsBypassed. Comment in IsBypassed method indicates ".NET Core and .NET Native + // code will handle this exception and call into WinInet/WinHttp as appropriate to use the system proxy." + // This needs to be investigated further. + null; +#endif + + /// + /// Allows you to set a proxy to be used by all VssHttpMessageHandler requests without affecting the global WebRequest.DefaultWebProxy. If not set it returns the WebRequest.DefaultWebProxy. + /// + public static IWebProxy DefaultWebProxy + { + get + { + var toReturn = WebProxyWrapper.Wrap(s_defaultWebProxy); + + if (null != toReturn && + toReturn.Credentials == null) + { + toReturn.Credentials = CredentialCache.DefaultCredentials; + } + + return toReturn; + } + set + { + // requested by Insights team to be able to set a default Proxy that only affects this handler. 
+ // see following bug for details: https://mseng.visualstudio.com/DefaultCollection/VSOnline/_workitems#_a=edit&id=425575&triage=true + s_defaultWebProxy = value; + } + } + + internal const String CredentialsType = nameof(CredentialsType); + + private const Int32 m_maxAuthRetries = 3; + private HttpMessageInvoker m_messageInvoker; + private CredentialWrapper m_credentialWrapper; + private bool m_appliedClientCertificatesToTransportHandler; + private bool m_appliedServerCertificateValidationCallbackToTransportHandler; + private readonly HttpMessageHandler m_transportHandler; + +#if NETSTANDARD + //.Net Core does not attempt NTLM schema on Linux, unless ICredentials is a CredentialCache instance + //This workaround may not be needed after this corefx fix is consumed: https://github.com/dotnet/corefx/pull/7923 + private sealed class CredentialWrapper : CredentialCache, ICredentials +#else + private sealed class CredentialWrapper : ICredentials +#endif + { + public ICredentials InnerCredentials + { + get; + set; + } + + NetworkCredential ICredentials.GetCredential( + Uri uri, + String authType) + { + return InnerCredentials != null ? InnerCredentials.GetCredential(uri, authType) : null; + } + } + + private sealed class WebProxyWrapper : IWebProxy + { + private WebProxyWrapper(IWebProxy toWrap) + { + m_wrapped = toWrap; + m_credentials = null; + } + + public static WebProxyWrapper Wrap(IWebProxy toWrap) + { + if (null == toWrap) + { + return null; + } + + return new WebProxyWrapper(toWrap); + } + + public ICredentials Credentials + { + get + { + ICredentials credentials = m_credentials; + + if (null == credentials) + { + // This means to fall back to the Credentials from the wrapped + // IWebProxy. + credentials = m_wrapped.Credentials; + } + else if (Object.ReferenceEquals(credentials, m_nullCredentials)) + { + // This sentinel value means we have explicitly had our credentials + // set to null. + credentials = null; + } + + return credentials; + } + + set + { + if (null == value) + { + // Use this as a sentinel value to distinguish the case when someone has + // explicitly set our credentials to null. We don't want to fall back to + // m_wrapped.Credentials when we have credentials that are explicitly null. + m_credentials = m_nullCredentials; + } + else + { + m_credentials = value; + } + } + } + + public Uri GetProxy(Uri destination) + { + return m_wrapped.GetProxy(destination); + } + + public bool IsBypassed(Uri host) + { + return m_wrapped.IsBypassed(host); + } + + private readonly IWebProxy m_wrapped; + private ICredentials m_credentials; + + private static readonly ICredentials m_nullCredentials = new CredentialWrapper(); + } + } +} diff --git a/src/Sdk/Common/Common/VssHttpMessageHandlerTraceInfo.cs b/src/Sdk/Common/Common/VssHttpMessageHandlerTraceInfo.cs new file mode 100644 index 00000000000..3b5d63af687 --- /dev/null +++ b/src/Sdk/Common/Common/VssHttpMessageHandlerTraceInfo.cs @@ -0,0 +1,109 @@ +using System; +using System.Net.Http; + +namespace GitHub.Services.Common +{ + /// + /// This class is used by the message handler, if injected as a request property, to trace additional + /// timing details for outgoing requests. 
This information is added to the HttpOutgoingRequest logs + /// + public class VssHttpMessageHandlerTraceInfo + { + DateTime _lastTime; + + static readonly String TfsTraceInfoKey = "TFS_TraceInfo"; + + public int TokenRetries { get; internal set; } + + public TimeSpan HandlerStartTime { get; private set; } + public TimeSpan BufferedRequestTime { get; private set; } + public TimeSpan RequestSendTime { get; private set; } + public TimeSpan ResponseContentTime { get; private set; } + public TimeSpan GetTokenTime { get; private set; } + public TimeSpan TrailingTime { get; private set; } + + public VssHttpMessageHandlerTraceInfo() + { + _lastTime = DateTime.UtcNow; + } + + internal void TraceHandlerStartTime() + { + var previous = _lastTime; + _lastTime = DateTime.UtcNow; + HandlerStartTime += (_lastTime - previous); + } + + internal void TraceBufferedRequestTime() + { + var previous = _lastTime; + _lastTime = DateTime.UtcNow; + BufferedRequestTime += (_lastTime - previous); + } + + internal void TraceRequestSendTime() + { + var previous = _lastTime; + _lastTime = DateTime.UtcNow; + RequestSendTime += (_lastTime - previous); + } + + internal void TraceResponseContentTime() + { + var previous = _lastTime; + _lastTime = DateTime.UtcNow; + ResponseContentTime += (_lastTime - previous); + } + + internal void TraceGetTokenTime() + { + var previous = _lastTime; + _lastTime = DateTime.UtcNow; + GetTokenTime += (_lastTime - previous); + } + + internal void TraceTrailingTime() + { + var previous = _lastTime; + _lastTime = DateTime.UtcNow; + TrailingTime += (_lastTime - previous); + } + + /// + /// Set the provided traceInfo as a property on a request message (if not already set) + /// + /// + /// + public static void SetTraceInfo(HttpRequestMessage message, VssHttpMessageHandlerTraceInfo traceInfo) + { + object existingTraceInfo; + if (!message.Properties.TryGetValue(TfsTraceInfoKey, out existingTraceInfo)) + { + message.Properties.Add(TfsTraceInfoKey, traceInfo); + } + } + + /// + /// Get VssHttpMessageHandlerTraceInfo from request message, or return null if none found + /// + /// + /// + public static VssHttpMessageHandlerTraceInfo GetTraceInfo(HttpRequestMessage message) + { + VssHttpMessageHandlerTraceInfo traceInfo = null; + + if (message.Properties.TryGetValue(TfsTraceInfoKey, out object traceInfoObject)) + { + traceInfo = traceInfoObject as VssHttpMessageHandlerTraceInfo; + } + + return traceInfo; + } + + public override string ToString() + { + return $"R:{TokenRetries}, HS:{HandlerStartTime.Ticks}, BR:{BufferedRequestTime.Ticks}, RS:{RequestSendTime.Ticks}, RC:{ResponseContentTime.Ticks}, GT:{GetTokenTime.Ticks}, TT={TrailingTime.Ticks}"; + } + } + +} diff --git a/src/Sdk/Common/Common/VssHttpRequestSettings.cs b/src/Sdk/Common/Common/VssHttpRequestSettings.cs new file mode 100644 index 00000000000..18f4c26d6c6 --- /dev/null +++ b/src/Sdk/Common/Common/VssHttpRequestSettings.cs @@ -0,0 +1,416 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using System.Threading; +using System.Net.Security; +using System.Security.Cryptography.X509Certificates; + +namespace GitHub.Services.Common +{ + /// + /// Provides common settings for a VssHttpMessageHandler instance. + /// + public class VssHttpRequestSettings + { + /// + /// Initializes a new VssHttpRequestSettings instance with compression enabled. 
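A brief sketch of how a caller could attach timing diagnostics to a request and read them back after the call completes, assuming the client's pipeline includes VssHttpMessageHandler so the buckets get populated (the HttpClient set-up is not shown):

using System;
using System.Net.Http;
using System.Threading.Tasks;
using GitHub.Services.Common;

static async Task<HttpResponseMessage> SendWithTimingAsync(HttpClient client, HttpRequestMessage request)
{
    var traceInfo = new VssHttpMessageHandlerTraceInfo();
    VssHttpMessageHandlerTraceInfo.SetTraceInfo(request, traceInfo);

    HttpResponseMessage response = await client.SendAsync(request);

    // ToString() emits the compact per-phase tick counts shown above (R, HS, BR, RS, RC, GT, TT).
    Console.WriteLine(VssHttpMessageHandlerTraceInfo.GetTraceInfo(request)?.ToString());
    return response;
}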
+ /// + public VssHttpRequestSettings() + : this(Guid.NewGuid()) + { + } + + /// + /// Initializes a new VssHttpRequestSettings instance with compression enabled. + /// + public VssHttpRequestSettings(Guid sessionId) + { + this.AllowAutoRedirect = false; + this.CompressionEnabled = true; + this.ExpectContinue = true; + this.BypassProxyOnLocal = true; + this.MaxContentBufferSize = c_defaultContentBufferSize; + this.SendTimeout = s_defaultTimeout; + if (!String.IsNullOrEmpty(CultureInfo.CurrentUICulture.Name)) // InvariantCulture for example has an empty name. + { + this.AcceptLanguages.Add(CultureInfo.CurrentUICulture); + } + this.SessionId = sessionId; + this.SuppressFedAuthRedirects = true; + this.ClientCertificateManager = null; + this.ServerCertificateValidationCallback = null; +#if NETSTANDARD + this.UseHttp11 = false; +#endif + + // If different, we'll also add CurrentCulture to the request headers, + // but UICulture was added first, so it gets first preference + if (!CultureInfo.CurrentCulture.Equals(CultureInfo.CurrentUICulture) && !String.IsNullOrEmpty(CultureInfo.CurrentCulture.Name)) + { + this.AcceptLanguages.Add(CultureInfo.CurrentCulture); + } + + this.MaxRetryRequest = c_defaultMaxRetry; + +#if DEBUG + string customClientRequestTimeout = Environment.GetEnvironmentVariable("VSS_Client_Request_Timeout"); + if (!string.IsNullOrEmpty(customClientRequestTimeout) && int.TryParse(customClientRequestTimeout, out int customTimeout)) + { + // avoid disrupting a debug session due to the request timing out by setting a custom timeout. + this.SendTimeout = TimeSpan.FromSeconds(customTimeout); + } +#endif + } + + /// + /// Initializes a new VssHttpRequestSettings instance with compression enabled. + /// + /// The e2eId argument is not used. + [Obsolete] + [EditorBrowsable(EditorBrowsableState.Never)] + public VssHttpRequestSettings(Guid sessionId, Guid e2eId) + : this(sessionId) + { + } + + /// + /// Copy Constructor + /// + /// + protected VssHttpRequestSettings(VssHttpRequestSettings copy) + { + this.AllowAutoRedirect = copy.AllowAutoRedirect; + this.CompressionEnabled = copy.CompressionEnabled; + this.ExpectContinue = copy.ExpectContinue; + this.BypassProxyOnLocal = copy.BypassProxyOnLocal; + this.MaxContentBufferSize = copy.MaxContentBufferSize; + this.SendTimeout = copy.SendTimeout; + this.m_acceptLanguages = new List(copy.AcceptLanguages); + this.SessionId = copy.SessionId; + this.AgentId = copy.AgentId; + this.SuppressFedAuthRedirects = copy.SuppressFedAuthRedirects; + this.UserAgent = new List(copy.UserAgent); + this.OperationName = copy.OperationName; + this.ClientCertificateManager = copy.ClientCertificateManager; + this.ServerCertificateValidationCallback = copy.ServerCertificateValidationCallback; + this.MaxRetryRequest = copy.MaxRetryRequest; +#if NETSTANDARD + this.UseHttp11 = copy.UseHttp11; +#endif + } + + /// + /// Gets or sets a value indicating whether or not HttpClientHandler should follow redirect on outgoing requests. + /// + public Boolean AllowAutoRedirect + { + get; + set; + } + + /// + /// Gets or sets a value indicating whether or not compression should be used on outgoing requests. + /// The default value is true. + /// + [DefaultValue(true)] + public Boolean CompressionEnabled + { + get; + set; + } + + /// + /// Gets or sets a value indicating whether or not the Expect: 100-continue header should be sent on + /// outgoing requess. The default value is true. 
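The constructor defaults above can be adjusted per connection, and DEBUG builds additionally honor the VSS_Client_Request_Timeout environment variable. A rough sketch with illustrative values:

using System;
using GitHub.Services.Common;

var settings = new VssHttpRequestSettings
{
    SendTimeout = TimeSpan.FromSeconds(300),   // default is 100 seconds
    MaxContentBufferSize = 64 * 1024 * 1024,   // default is 512 MB, capped at 1024 MB
    AllowAutoRedirect = false                  // matches the constructor default
};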
+ /// + [DefaultValue(true)] + public Boolean ExpectContinue + { + get; + set; + } + + /// + /// Sets whether to bypass web proxies if the call is local + /// + public Boolean BypassProxyOnLocal + { + get; + set; + } + +#if NETSTANDARD + /// + /// The .NET Core 2.1 runtime switched its HTTP default from HTTP 1.1 to HTTP 2. + /// This causes problems with some versions of the Curl handler on Linux. + /// See GitHub issue https://github.com/dotnet/corefx/issues/32376 + /// If true, requests generated by this client will use HTTP 1.1. + /// + public Boolean UseHttp11 + { + get; + set; + } +#endif + + /// + /// Gets or sets the maximum size allowed for response content buffering. + /// + [DefaultValue(c_defaultContentBufferSize)] + public Int32 MaxContentBufferSize + { + get + { + return m_maxContentBufferSize; + } + set + { + ArgumentUtility.CheckForOutOfRange(value, nameof(value), 0, c_maxAllowedContentBufferSize); + m_maxContentBufferSize = value; + } + } + + /// + /// Timespan to wait before timing out a request. Defaults to 100 seconds + /// + public TimeSpan SendTimeout + { + get; + set; + } + + /// + /// Provides a hint to the server requesting that rather than getting 302 redirects as part of FedAuth flows 401 and 403 are passed through. + /// + [DefaultValue(true)] + public Boolean SuppressFedAuthRedirects + { + get; + set; + } + + /// + /// User-Agent header passed along in the request, + /// For multiple values, the order in the list is the order + /// in which they will appear in the header + /// + public List UserAgent + { + get; + set; + } + + /// + /// The name of the culture is passed in the Accept-Language header + /// + public ICollection AcceptLanguages + { + get + { + return m_acceptLanguages; + } + } + + /// + /// A unique identifier for the user session + /// + public Guid SessionId + { + get; + set; + } + + /// + /// End to End ID which gets propagated everywhere unchanged + /// + public Guid E2EId + { + get; + set; + } + + /// + /// This is a kind of combination between SessionId and UserAgent. + /// If supplied, the value should be a string that uniquely identifies + /// this application running on this particular machine. + /// The server will then use this value + /// to correlate user requests, even if the process restarts. + /// + public String AgentId + { + get; + set; + } + + /// + /// An optional string that is sent in the SessionId header used to group a set of operations together + /// + public String OperationName + { + get; + set; + } + + /// + /// Optional implementation used to gather client certificates + /// for connections that require them + /// + public IVssClientCertificateManager ClientCertificateManager + { + get; + set; + } + +#if !NETSTANDARD + /// + /// Optional implementation used to validate server certificate validation + /// + public RemoteCertificateValidationCallback ServerCertificateValidationCallback + { + get; set; + } +#else + /// + /// Optional implementation used to validate server certificate validation + /// + public Func ServerCertificateValidationCallback + { + get; + set; + } +#endif + + /// + /// Number of times to retry a request that has an ambient failure + /// + /// + /// This property is only used by VssConnection, so only relevant on the client + /// + [DefaultValue(c_defaultMaxRetry)] + public Int32 MaxRetryRequest + { + get; + set; + } + + protected internal virtual Boolean IsHostLocal(String hostName) + { + //base class always returns false. 
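The base implementation of IsHostLocal always answers false; the real override lives in VssClientHttpRequestSettings, which is not part of this file. A simplified sketch of the idea, assuming the derived type sits in the same assembly as the settings class:

using System;
using GitHub.Services.Common;

// Illustrative only; not the actual VssClientHttpRequestSettings override.
internal class LoopbackAwareRequestSettings : VssHttpRequestSettings
{
    protected internal override Boolean IsHostLocal(String hostName)
    {
        // Treat loopback names as local so BypassProxyOnLocal can skip the proxy.
        return string.Equals(hostName, "localhost", StringComparison.OrdinalIgnoreCase)
            || hostName == "127.0.0.1"
            || hostName == "::1";
    }
}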
See VssClientHttpRequestSettings for override + return false; + } + + protected internal virtual Boolean ApplyTo(HttpRequestMessage request) + { + // Make sure we only apply the settings to the request once + if (request.Properties.ContainsKey(PropertyName)) + { + return false; + } + + request.Properties.Add(PropertyName, this); + + if (this.AcceptLanguages != null && this.AcceptLanguages.Count > 0) + { + // An empty or null CultureInfo name will cause an ArgumentNullException in the + // StringWithQualityHeaderValue constructor. CultureInfo.InvariantCulture is an example of + // a CultureInfo that has an empty name. + foreach (CultureInfo culture in this.AcceptLanguages.Where(a => !String.IsNullOrEmpty(a.Name))) + { + request.Headers.AcceptLanguage.Add(new StringWithQualityHeaderValue(culture.Name)); + } + } + + if (this.UserAgent != null) + { + foreach (var headerVal in this.UserAgent) + { + if (!request.Headers.UserAgent.Contains(headerVal)) + { + request.Headers.UserAgent.Add(headerVal); + } + } + } + + if (this.SuppressFedAuthRedirects) + { + request.Headers.Add(Internal.HttpHeaders.TfsFedAuthRedirect, "Suppress"); + } + + // Record the command, if we have it. Otherwise, just record the session ID. + if (!request.Headers.Contains(Internal.HttpHeaders.TfsSessionHeader)) + { + if (!String.IsNullOrEmpty(this.OperationName)) + { + request.Headers.Add(Internal.HttpHeaders.TfsSessionHeader, String.Concat(this.SessionId.ToString("D"), ", ", this.OperationName)); + } + else + { + request.Headers.Add(Internal.HttpHeaders.TfsSessionHeader, this.SessionId.ToString("D")); + } + } + + if (!String.IsNullOrEmpty(this.AgentId)) + { + request.Headers.Add(Internal.HttpHeaders.VssAgentHeader, this.AgentId); + } + +#if NETSTANDARD + // Content is being sent as chunked by default in dotnet5.4, which differs than the .net 4.5 behaviour. + if (request.Content != null && !request.Content.Headers.ContentLength.HasValue && !request.Headers.TransferEncodingChunked.HasValue) + { + request.Content.Headers.ContentLength = request.Content.ReadAsByteArrayAsync().Result.Length; + } +#endif + + return true; + } + + /// + /// Gets the encoding used for outgoing requests. + /// + public static Encoding Encoding + { + get + { + return s_encoding.Value; + } + } + + /// + /// Gets the property name used to reference this object. + /// + public const String PropertyName = "MS.VS.RequestSettings"; + + /// + /// Gets the property name used to reference the completion option for a specific request. + /// + public const String HttpCompletionOptionPropertyName = "MS.VS.HttpCompletionOption"; + + /// + /// Header to include the light weight response client option. + /// + public const string LightweightHeader = "lightweight"; + + /// + /// Header to include the exclude urls client option. 
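For a sense of what the session header carries, the value is the session GUID in "D" format, optionally followed by the operation name. A small snippet mirroring the String.Concat logic in ApplyTo above (the operation name is hypothetical):

using System;

Guid sessionId = Guid.NewGuid();
string operationName = "GetBuildArtifacts";   // hypothetical operation name

string sessionHeaderValue = string.IsNullOrEmpty(operationName)
    ? sessionId.ToString("D")
    : string.Concat(sessionId.ToString("D"), ", ", operationName);

Console.WriteLine(sessionHeaderValue);  // e.g. "2f4c9a6e-..., GetBuildArtifacts"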
+ /// + public const string ExcludeUrlsHeader = "excludeUrls"; + + private Int32 m_maxContentBufferSize; + private ICollection m_acceptLanguages = new List(); + private static Lazy s_encoding = new Lazy(() => new UTF8Encoding(false), LazyThreadSafetyMode.PublicationOnly); + private static readonly TimeSpan s_defaultTimeout = TimeSpan.FromSeconds(100); //default WebAPI timeout + private const Int32 c_defaultMaxRetry = 3; + + // We will buffer a maximum of 1024MB in the message handler + private const Int32 c_maxAllowedContentBufferSize = 1024 * 1024 * 1024; + + // We will buffer, by default, up to 512MB in the message handler + private const Int32 c_defaultContentBufferSize = 1024 * 1024 * 512; + } +} diff --git a/src/Sdk/Common/Common/VssHttpRetryMessageHandler.cs b/src/Sdk/Common/Common/VssHttpRetryMessageHandler.cs new file mode 100644 index 00000000000..69c6dacf299 --- /dev/null +++ b/src/Sdk/Common/Common/VssHttpRetryMessageHandler.cs @@ -0,0 +1,232 @@ +using GitHub.Services.Common.Diagnostics; +using System; +using System.ComponentModel; +using System.Diagnostics; +using System.Net; +using System.Net.Http; +using System.Net.Sockets; +using System.Threading; +using System.Threading.Tasks; +using System.Collections.Generic; +using System.Linq; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.Common +{ + /// + /// Handles automatic replay of HTTP requests when errors are encountered based on a configurable set of options. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public class VssHttpRetryMessageHandler : DelegatingHandler + { + public VssHttpRetryMessageHandler(Int32 maxRetries) + : this(new VssHttpRetryOptions { MaxRetries = maxRetries }) + { + } + + public VssHttpRetryMessageHandler(Int32 maxRetries, string clientName) + : this(new VssHttpRetryOptions { MaxRetries = maxRetries }) + { + m_clientName = clientName; + } + + public VssHttpRetryMessageHandler(VssHttpRetryOptions options) + { + m_retryOptions = options; + } + + public VssHttpRetryMessageHandler( + VssHttpRetryOptions options, + HttpMessageHandler innerHandler) + : base(innerHandler) + { + m_retryOptions = options; + } + + protected override async Task SendAsync( + HttpRequestMessage request, + CancellationToken cancellationToken) + { + Int32 attempt = 1; + HttpResponseMessage response = null; + HttpRequestException exception = null; + VssTraceActivity traceActivity = VssTraceActivity.Current; + + // Allow overriding default retry options per request + VssHttpRetryOptions retryOptions = m_retryOptions; + object retryOptionsObject; + if (request.Properties.TryGetValue(HttpRetryOptionsKey, out retryOptionsObject)) // NETSTANDARD compliant, TryGetValue is not + { + // Fallback to default options if object of unexpected type was passed + retryOptions = retryOptionsObject as VssHttpRetryOptions ?? m_retryOptions; + } + + TimeSpan minBackoff = retryOptions.MinBackoff; + Int32 maxAttempts = retryOptions.MaxRetries + 1; + + IVssHttpRetryInfo retryInfo = null; + object retryInfoObject; + if (request.Properties.TryGetValue(HttpRetryInfoKey, out retryInfoObject)) // NETSTANDARD compliant, TryGetValue is not + { + retryInfo = retryInfoObject as IVssHttpRetryInfo; + } + + if (IsLowPriority(request)) + { + // Increase the backoff and retry count, low priority requests can be retried many times if the server is busy. 
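A short sketch of overriding the retry policy for a single request via the property bag, using the public HttpRetryOptionsKey constant defined further down (the option values and URI are arbitrary):

using System;
using System.Net.Http;
using GitHub.Services.Common;

var request = new HttpRequestMessage(HttpMethod.Get, "https://example.test/_apis/resource");

// Only this request gets the tighter policy; every other request keeps the
// handler-level VssHttpRetryOptions passed to the constructor.
request.Properties[VssHttpRetryMessageHandler.HttpRetryOptionsKey] = new VssHttpRetryOptions
{
    MaxRetries = 1,
    MinBackoff = TimeSpan.FromSeconds(1),
    MaxBackoff = TimeSpan.FromSeconds(5)
};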
+ minBackoff = TimeSpan.FromSeconds(minBackoff.TotalSeconds * 2); + maxAttempts = maxAttempts * 10; + } + + TimeSpan backoff = minBackoff; + + while (attempt <= maxAttempts) + { + // Reset the exception so we don't have a lingering variable + exception = null; + + Boolean canRetry = false; + SocketError? socketError = null; + HttpStatusCode? statusCode = null; + WebExceptionStatus? webExceptionStatus = null; + WinHttpErrorCode? winHttpErrorCode = null; + CurlErrorCode? curlErrorCode = null; + string afdRefInfo = null; + try + { + if (attempt == 1) + { + retryInfo?.InitialAttempt(request); + } + + response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false); + + if (attempt > 1) + { + TraceHttpRequestSucceededWithRetry(traceActivity, response, attempt); + } + + // Verify the response is successful or the status code is one that may be retried. + if (response.IsSuccessStatusCode) + { + break; + } + else + { + statusCode = response.StatusCode; + afdRefInfo = response.Headers.TryGetValues(HttpHeaders.AfdResponseRef, out var headers) ? headers.First() : null; + canRetry = m_retryOptions.IsRetryableResponse(response); + } + } + catch (HttpRequestException ex) + { + exception = ex; + canRetry = VssNetworkHelper.IsTransientNetworkException(exception, m_retryOptions, out statusCode, out webExceptionStatus, out socketError, out winHttpErrorCode, out curlErrorCode); + } + catch (TimeoutException) + { + throw; + } + + if (attempt < maxAttempts && canRetry) + { + backoff = BackoffTimerHelper.GetExponentialBackoff(attempt, minBackoff, m_retryOptions.MaxBackoff, m_retryOptions.BackoffCoefficient); + retryInfo?.Retry(backoff); + TraceHttpRequestRetrying(traceActivity, request, attempt, backoff, statusCode, webExceptionStatus, socketError, winHttpErrorCode, curlErrorCode, afdRefInfo); + } + else + { + if (attempt < maxAttempts) + { + if (exception == null) + { + TraceHttpRequestFailed(traceActivity, request, statusCode != null ? 
statusCode.Value : (HttpStatusCode)0, afdRefInfo); + } + else + { + TraceHttpRequestFailed(traceActivity, request, exception); + } + } + else + { + TraceHttpRequestFailedMaxAttempts(traceActivity, request, attempt, statusCode, webExceptionStatus, socketError, winHttpErrorCode, curlErrorCode, afdRefInfo); + } + break; + } + + // Make sure to dispose of this so we don't keep the connection open + if (response != null) + { + response.Dispose(); + } + + attempt++; + TraceRaw(request, 100011, TraceLevel.Error, + "{{ \"Client\":\"{0}\", \"Endpoint\":\"{1}\", \"Attempt\":{2}, \"MaxAttempts\":{3}, \"Backoff\":{4} }}", + m_clientName, + request.RequestUri.Host, + attempt, + maxAttempts, + backoff.TotalMilliseconds); + await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); + } + + if (exception != null) + { + throw exception; + } + + return response; + } + + protected virtual void TraceRaw(HttpRequestMessage request, int tracepoint, TraceLevel level, string message, params object[] args) + { + // implement in Server so retries are recorded in ProductTrace + } + + protected virtual void TraceHttpRequestFailed(VssTraceActivity activity, HttpRequestMessage request, HttpStatusCode statusCode, string afdRefInfo) + { + VssHttpEventSource.Log.HttpRequestFailed(activity, request, statusCode, afdRefInfo); + } + + protected virtual void TraceHttpRequestFailed(VssTraceActivity activity, HttpRequestMessage request, Exception exception) + { + VssHttpEventSource.Log.HttpRequestFailed(activity, request, exception); + } + + protected virtual void TraceHttpRequestFailedMaxAttempts(VssTraceActivity activity, HttpRequestMessage request, Int32 attempt, HttpStatusCode? httpStatusCode, WebExceptionStatus? webExceptionStatus, SocketError? socketErrorCode, WinHttpErrorCode? winHttpErrorCode, CurlErrorCode? curlErrorCode, string afdRefInfo) + { + VssHttpEventSource.Log.HttpRequestFailedMaxAttempts(activity, request, attempt, httpStatusCode, webExceptionStatus, socketErrorCode, winHttpErrorCode, curlErrorCode, afdRefInfo); + } + + protected virtual void TraceHttpRequestSucceededWithRetry(VssTraceActivity activity, HttpResponseMessage response, Int32 attempt) + { + VssHttpEventSource.Log.HttpRequestSucceededWithRetry(activity, response, attempt); + } + + protected virtual void TraceHttpRequestRetrying(VssTraceActivity activity, HttpRequestMessage request, Int32 attempt, TimeSpan backoffDuration, HttpStatusCode? httpStatusCode, WebExceptionStatus? webExceptionStatus, SocketError? socketErrorCode, WinHttpErrorCode? winHttpErrorCode, CurlErrorCode? 
curlErrorCode, string afdRefInfo) + { + VssHttpEventSource.Log.HttpRequestRetrying(activity, request, attempt, backoffDuration, httpStatusCode, webExceptionStatus, socketErrorCode, winHttpErrorCode, curlErrorCode, afdRefInfo); + } + + private static bool IsLowPriority(HttpRequestMessage request) + { + bool isLowPriority = false; + + IEnumerable headers; + + if (request.Headers.TryGetValues(HttpHeaders.VssRequestPriority, out headers) && headers != null) + { + string header = headers.FirstOrDefault(); + isLowPriority = string.Equals(header, "Low", StringComparison.OrdinalIgnoreCase); + } + + return isLowPriority; + } + + private VssHttpRetryOptions m_retryOptions; + public const string HttpRetryInfoKey = "HttpRetryInfo"; + public const string HttpRetryOptionsKey = "VssHttpRetryOptions"; + private string m_clientName = ""; + } +} diff --git a/src/Sdk/Common/Common/VssHttpRetryOptions.cs b/src/Sdk/Common/Common/VssHttpRetryOptions.cs new file mode 100644 index 00000000000..6f3b9c6cd4c --- /dev/null +++ b/src/Sdk/Common/Common/VssHttpRetryOptions.cs @@ -0,0 +1,200 @@ +using GitHub.Services.Common.Internal; +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Threading; + +namespace GitHub.Services.Common +{ + /// + /// Defines the options used for configuring the retry policy. + /// + public class VssHttpRetryOptions + { + public VssHttpRetryOptions() + : this (new VssHttpRetryableStatusCodeFilter[] { s_hostShutdownFilter } ) + { + } + + public VssHttpRetryOptions(IEnumerable filters) + { + this.BackoffCoefficient = s_backoffCoefficient; + this.MinBackoff = s_minBackoff; + this.MaxBackoff = s_maxBackoff; + this.MaxRetries = 5; + this.RetryableStatusCodes = new HashSet + { + HttpStatusCode.BadGateway, + HttpStatusCode.GatewayTimeout, + HttpStatusCode.ServiceUnavailable, + }; + + this.m_retryFilters = new HashSet(filters); + } + + /// + /// Gets a singleton read-only instance of the default settings. + /// + public static VssHttpRetryOptions Default + { + get + { + return s_defaultOptions.Value; + } + } + + /// + /// Gets or sets the coefficient which exponentially increases the backoff starting at . + /// + public TimeSpan BackoffCoefficient + { + get + { + return m_backoffCoefficient; + } + set + { + ThrowIfReadonly(); + m_backoffCoefficient = value; + } + } + + /// + /// Gets or sets the minimum backoff interval to be used. + /// + public TimeSpan MinBackoff + { + get + { + return m_minBackoff; + } + set + { + ThrowIfReadonly(); + m_minBackoff = value; + } + } + + /// + /// Gets or sets the maximum backoff interval to be used. + /// + public TimeSpan MaxBackoff + { + get + { + return m_maxBackoff; + } + set + { + ThrowIfReadonly(); + m_maxBackoff = value; + } + } + + /// + /// Gets or sets the maximum number of retries allowed. + /// + public Int32 MaxRetries + { + get + { + return m_maxRetries; + } + set + { + ThrowIfReadonly(); + m_maxRetries = value; + } + } + + /// + /// Gets a set of HTTP status codes which should be retried. + /// + public ICollection RetryableStatusCodes + { + get + { + return m_retryableStatusCodes; + } + private set + { + ThrowIfReadonly(); + m_retryableStatusCodes = value; + } + } + + /// + /// How to verify that the response can be retried. + /// + /// Response message from a request + /// True if the request can be retried, false otherwise. 
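By way of example, throttling responses (HTTP 429) could be made retryable while a filter vetoes retries when a marker header is present; the header name below is hypothetical, and note that supplying custom filters replaces the default host-offline filter:

using GitHub.Services.Common;

var options = new VssHttpRetryOptions(new VssHttpRetryOptions.VssHttpRetryableStatusCodeFilter[]
{
    // Returning true from a filter means "do NOT retry", even for a retryable status code.
    response => response.Headers.Contains("X-Do-Not-Retry")   // hypothetical header
});

options.RetryableStatusCodes.Add(VssNetworkHelper.TooManyRequests);  // HTTP 429
options.MakeReadonly();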
+ public Boolean IsRetryableResponse(HttpResponseMessage response) + { + if (m_retryableStatusCodes.Contains(response.StatusCode)) + { + foreach (VssHttpRetryableStatusCodeFilter filter in m_retryFilters) + { + if (filter(response)) + { + return false; + } + } + + return true; + } + + return false; + } + + /// + /// Ensures that no further modifications may be made to the retry options. + /// + /// A read-only instance of the retry options + public VssHttpRetryOptions MakeReadonly() + { + if (Interlocked.CompareExchange(ref m_isReadOnly, 1, 0) == 0) + { + m_retryableStatusCodes = new ReadOnlyCollection(m_retryableStatusCodes.ToList()); + m_retryFilters = new ReadOnlyCollection(m_retryFilters.ToList()); + } + return this; + } + + + + /// + /// Throws an InvalidOperationException if this is marked as ReadOnly. + /// + private void ThrowIfReadonly() + { + if (m_isReadOnly > 0) + { + throw new InvalidOperationException(); + } + } + + /// + /// Returns false if we should continue retrying based on the response, and true if we should not, even though + /// this is technically a retryable status code. + /// + /// The response to check if we should retry the request. + /// False if we should retry, true if we should not based on the response. + public delegate Boolean VssHttpRetryableStatusCodeFilter(HttpResponseMessage response); + + private Int32 m_isReadOnly; + private Int32 m_maxRetries; + private TimeSpan m_minBackoff; + private TimeSpan m_maxBackoff; + private TimeSpan m_backoffCoefficient; + private ICollection m_retryableStatusCodes; + private ICollection m_retryFilters; + private static TimeSpan s_minBackoff = TimeSpan.FromSeconds(10); + private static TimeSpan s_maxBackoff = TimeSpan.FromMinutes(10); + private static TimeSpan s_backoffCoefficient = TimeSpan.FromSeconds(1); + private static Lazy s_defaultOptions = new Lazy(() => new VssHttpRetryOptions().MakeReadonly()); + private static VssHttpRetryableStatusCodeFilter s_hostShutdownFilter = new VssHttpRetryableStatusCodeFilter(response => response.Headers.Contains(HttpHeaders.VssHostOfflineError)); + } +} diff --git a/src/Sdk/Common/Common/VssNetworkHelper.cs b/src/Sdk/Common/Common/VssNetworkHelper.cs new file mode 100644 index 00000000000..49d69d99dcd --- /dev/null +++ b/src/Sdk/Common/Common/VssNetworkHelper.cs @@ -0,0 +1,236 @@ +using System; +using System.ComponentModel; +using System.IO; +using System.Net; +using System.Net.Sockets; + +namespace GitHub.Services.Common +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class VssNetworkHelper + { + /// + /// Heuristic used to determine whether an exception is a transient network + /// failure that should be retried. + /// + public static bool IsTransientNetworkException(Exception ex) + { + return IsTransientNetworkException(ex, new VssHttpRetryOptions()); + } + + /// + /// Heuristic used to determine whether an exception is a transient network + /// failure that should be retried. + /// + public static bool IsTransientNetworkException( + Exception ex, + VssHttpRetryOptions options) + { + HttpStatusCode? httpStatusCode; + WebExceptionStatus? webExceptionStatus; + SocketError? socketErrorCode; + WinHttpErrorCode? winHttpErrorCode; + CurlErrorCode? curlErrorCode; + return IsTransientNetworkException(ex, options, out httpStatusCode, out webExceptionStatus, out socketErrorCode, out winHttpErrorCode, out curlErrorCode); + } + + /// + /// Heuristic used to determine whether an exception is a transient network + /// failure that should be retried. 
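Callers outside the retry handler can reuse the same heuristic; a minimal sketch that retries once on a transient failure (a fresh HttpRequestMessage is created per attempt because a request can only be sent once):

using System;
using System.Net.Http;
using System.Threading.Tasks;
using GitHub.Services.Common;

static async Task<HttpResponseMessage> SendWithOneRetryAsync(HttpClient client, Func<HttpRequestMessage> createRequest)
{
    try
    {
        return await client.SendAsync(createRequest());
    }
    catch (HttpRequestException ex) when (VssNetworkHelper.IsTransientNetworkException(ex))
    {
        // Transient failure (DNS lookup, connection reset, timeout, ...): one more attempt.
        await Task.Delay(TimeSpan.FromSeconds(2));
        return await client.SendAsync(createRequest());
    }
}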
+ /// + public static bool IsTransientNetworkException( + Exception ex, + out HttpStatusCode? httpStatusCode, + out WebExceptionStatus? webExceptionStatus, + out SocketError? socketErrorCode, + out WinHttpErrorCode? winHttpErrorCode, + out CurlErrorCode? curlErrorCode) + { + return IsTransientNetworkException(ex, VssHttpRetryOptions.Default, out httpStatusCode, out webExceptionStatus, out socketErrorCode, out winHttpErrorCode, out curlErrorCode); + } + + /// + /// Heuristic used to determine whether an exception is a transient network + /// failure that should be retried. + /// + public static bool IsTransientNetworkException( + Exception ex, + VssHttpRetryOptions options, + out HttpStatusCode? httpStatusCode, + out WebExceptionStatus? webExceptionStatus, + out SocketError? socketErrorCode, + out WinHttpErrorCode? winHttpErrorCode, + out CurlErrorCode? curlErrorCode) + { + httpStatusCode = null; + webExceptionStatus = null; + socketErrorCode = null; + winHttpErrorCode = null; + curlErrorCode = null; + + while (ex != null) + { + if (IsTransientNetworkExceptionHelper(ex, options, out httpStatusCode, out webExceptionStatus, out socketErrorCode, out winHttpErrorCode, out curlErrorCode)) + { + return true; + } + + ex = ex.InnerException; + } + + return false; + } + + /// + /// Helper which checks a particular Exception instance (non-recursive). + /// + private static bool IsTransientNetworkExceptionHelper( + Exception ex, + VssHttpRetryOptions options, + out HttpStatusCode? httpStatusCode, + out WebExceptionStatus? webExceptionStatus, + out SocketError? socketErrorCode, + out WinHttpErrorCode? winHttpErrorCode, + out CurlErrorCode? curlErrorCode) + { + ArgumentUtility.CheckForNull(ex, "ex"); + + httpStatusCode = null; + webExceptionStatus = null; + socketErrorCode = null; + winHttpErrorCode = null; + curlErrorCode = null; + + if (ex is WebException) + { + WebException webEx = (WebException)ex; + + if (webEx.Response != null && webEx.Response is HttpWebResponse) + { + var httpResponse = (HttpWebResponse)webEx.Response; + httpStatusCode = httpResponse.StatusCode; + + // If the options include this status code as a retryable error then we report the exception + // as transient to the caller + if (options.RetryableStatusCodes.Contains(httpResponse.StatusCode)) + { + return true; + } + } + + webExceptionStatus = webEx.Status; + + if (webEx.Status == WebExceptionStatus.ConnectFailure || + webEx.Status == WebExceptionStatus.ConnectionClosed || + webEx.Status == WebExceptionStatus.KeepAliveFailure || + webEx.Status == WebExceptionStatus.NameResolutionFailure || + webEx.Status == WebExceptionStatus.ReceiveFailure || + webEx.Status == WebExceptionStatus.SendFailure || + webEx.Status == WebExceptionStatus.Timeout) + { + return true; + } + } + else if (ex is SocketException) + { + SocketException sockEx = (SocketException)ex; + + socketErrorCode = sockEx.SocketErrorCode; + + if (sockEx.SocketErrorCode == SocketError.Interrupted || + sockEx.SocketErrorCode == SocketError.NetworkDown || + sockEx.SocketErrorCode == SocketError.NetworkUnreachable || + sockEx.SocketErrorCode == SocketError.NetworkReset || + sockEx.SocketErrorCode == SocketError.ConnectionAborted || + sockEx.SocketErrorCode == SocketError.ConnectionReset || + sockEx.SocketErrorCode == SocketError.TimedOut || + sockEx.SocketErrorCode == SocketError.HostDown || + sockEx.SocketErrorCode == SocketError.HostUnreachable || + sockEx.SocketErrorCode == SocketError.TryAgain) + { + return true; + } + } + else if (ex is Win32Exception) // WinHttpException 
when use WinHttp (dotnet core) + { + Win32Exception winHttpEx = (Win32Exception)ex; + + Int32 errorCode = winHttpEx.NativeErrorCode; + if (errorCode > (Int32)WinHttpErrorCode.WINHTTP_ERROR_BASE && + errorCode <= (Int32)WinHttpErrorCode.WINHTTP_ERROR_LAST) + { + winHttpErrorCode = (WinHttpErrorCode)errorCode; + + if (winHttpErrorCode == WinHttpErrorCode.ERROR_WINHTTP_CANNOT_CONNECT || + winHttpErrorCode == WinHttpErrorCode.ERROR_WINHTTP_CONNECTION_ERROR || + winHttpErrorCode == WinHttpErrorCode.ERROR_WINHTTP_INTERNAL_ERROR || + winHttpErrorCode == WinHttpErrorCode.ERROR_WINHTTP_NAME_NOT_RESOLVED || + winHttpErrorCode == WinHttpErrorCode.ERROR_WINHTTP_TIMEOUT) + { + return true; + } + } + } + else if (ex is IOException) + { + if (null != ex.InnerException && + ex.InnerException is Win32Exception) + { + String stackTrace = ex.StackTrace; + + if (null != stackTrace && + stackTrace.IndexOf("System.Net.Security._SslStream.StartWriting(", StringComparison.Ordinal) >= 0) + { + // HACK: There is an underlying HRESULT code for this error which is not set on the exception which + // bubbles from the underlying stack. The top of the stack trace will be in the _SslStream class + // and will have an exception chain of HttpRequestException -> IOException -> Win32Exception. + + // Check for SEC_E_CONTEXT_EXPIRED as this occurs at random in the underlying stack. Retrying the + // request should get a new connection and work correctly, so we ignore this particular error. + + return true; + } + } + } + else if (ex.GetType().Name == "CurlException") // CurlException when use libcurl (dotnet core) + { + // Valid curl error code should in range (0, 93] + if (ex.HResult > 0 && ex.HResult < 94) + { + curlErrorCode = (CurlErrorCode)ex.HResult; + if (curlErrorCode == CurlErrorCode.CURLE_COULDNT_RESOLVE_PROXY || + curlErrorCode == CurlErrorCode.CURLE_COULDNT_RESOLVE_HOST || + curlErrorCode == CurlErrorCode.CURLE_COULDNT_CONNECT || + curlErrorCode == CurlErrorCode.CURLE_HTTP2 || + curlErrorCode == CurlErrorCode.CURLE_PARTIAL_FILE || + curlErrorCode == CurlErrorCode.CURLE_WRITE_ERROR || + curlErrorCode == CurlErrorCode.CURLE_UPLOAD_FAILED || + curlErrorCode == CurlErrorCode.CURLE_READ_ERROR || + curlErrorCode == CurlErrorCode.CURLE_OPERATION_TIMEDOUT || + curlErrorCode == CurlErrorCode.CURLE_INTERFACE_FAILED || + curlErrorCode == CurlErrorCode.CURLE_GOT_NOTHING || + curlErrorCode == CurlErrorCode.CURLE_SEND_ERROR || + curlErrorCode == CurlErrorCode.CURLE_RECV_ERROR) + { + return true; + } + } + } +#if !NETSTANDARD + else if (ex is System.Data.Services.Client.DataServiceRequestException || + ex is System.Data.Services.Client.DataServiceClientException) + { + // WCF exceptions + return true; + } +#endif + + return false; + } + + /// + /// Gets the HttpStatusCode which represents a throttling error. 
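When the caller also needs to know which layer reported the failure (HTTP status, WebException, socket, WinHTTP, or curl), the detailed overload surfaces that; a logging-oriented sketch:

using System;
using GitHub.Services.Common;

static bool ShouldRetry(Exception ex)
{
    bool transient = VssNetworkHelper.IsTransientNetworkException(
        ex,
        VssHttpRetryOptions.Default,
        out var httpStatusCode,
        out var webExceptionStatus,
        out var socketErrorCode,
        out var winHttpErrorCode,
        out var curlErrorCode);

    Console.WriteLine(
        $"transient={transient} http={httpStatusCode} web={webExceptionStatus} " +
        $"socket={socketErrorCode} winhttp={winHttpErrorCode} curl={curlErrorCode}");

    return transient;
}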
+ /// + public const HttpStatusCode TooManyRequests = (HttpStatusCode)429; + } +} diff --git a/src/Sdk/Common/Common/VssPerformanceEventSource.cs b/src/Sdk/Common/Common/VssPerformanceEventSource.cs new file mode 100644 index 00000000000..dd65becb4af --- /dev/null +++ b/src/Sdk/Common/Common/VssPerformanceEventSource.cs @@ -0,0 +1,498 @@ +using System; +using System.Diagnostics.Tracing; + +namespace GitHub.Services.Common +{ + /// + /// Note: This is our perfview event source which is used for performance troubleshooting + /// Sadly, EventSource has few overloads so anything that isn't in http://msdn.microsoft.com/en-us/library/system.diagnostics.tracing.eventsource.writeevent.aspx + /// will cause a bunch of allocations - so we use manual interop for anything non trivial. + /// + /// + public sealed class VssPerformanceEventSource : EventSource + { + public static VssPerformanceEventSource Log = new VssPerformanceEventSource(); + + #region WriteEvent PInvoke Overrides + [NonEvent] + public unsafe void WriteEvent(int eventId, Guid u1, Guid u2, string st) + { + if (IsEnabled()) + { + st = st ?? String.Empty; + const int parameters = 3; + EventData* dataDesc = stackalloc EventData[parameters]; + + dataDesc[0].Size = sizeof(Guid); + dataDesc[0].DataPointer = (IntPtr)(&u1); + dataDesc[1].Size = sizeof(Guid); + dataDesc[1].DataPointer = (IntPtr)(&u2); + dataDesc[2].Size = (st.Length + 1) * sizeof(char); + + fixed (char* pcst = st) + { + dataDesc[2].DataPointer = (IntPtr)pcst; + WriteEventCore(eventId, parameters, dataDesc); + } + } + } + + [NonEvent] + public unsafe void WriteEvent(int eventId, Guid u1, Guid u2, string st, long duration) + { + if (IsEnabled()) + { + st = st ?? String.Empty; + const int parameters = 4; + EventData* dataDesc = stackalloc EventData[parameters]; + + dataDesc[0].Size = sizeof(Guid); + dataDesc[0].DataPointer = (IntPtr)(&u1); + dataDesc[1].Size = sizeof(Guid); + dataDesc[1].DataPointer = (IntPtr)(&u2); + dataDesc[2].Size = (st.Length + 1) * sizeof(char); + dataDesc[3].Size = sizeof(long); + dataDesc[3].DataPointer = (IntPtr)(&duration); + + fixed (char* pcst = st) + { + dataDesc[2].DataPointer = (IntPtr)pcst; + + WriteEventCore(eventId, parameters, dataDesc); + } + } + } + + [NonEvent] + public unsafe void WriteEvent(int eventId, Guid u, string st) + { + if (IsEnabled()) + { + st = st ?? String.Empty; + const int parameters = 2; + EventData* dataDesc = stackalloc EventData[parameters]; + + dataDesc[0].DataPointer = (IntPtr)(&u); + dataDesc[0].Size = sizeof(Guid); + dataDesc[1].Size = (st.Length + 1) * sizeof(char); + + fixed (char* pcSt = st) + { + dataDesc[1].DataPointer = (IntPtr)(pcSt); + + WriteEventCore(eventId, parameters, dataDesc); + } + } + } + + [NonEvent] + public unsafe void WriteEvent(int eventId, Guid u, string st, long duration) + { + if (IsEnabled()) + { + st = st ?? 
String.Empty; + const int parameters = 3; + EventData* dataDesc = stackalloc EventData[parameters]; + + dataDesc[0].DataPointer = (IntPtr)(&u); + dataDesc[0].Size = sizeof(Guid); + dataDesc[1].Size = (st.Length + 1) * sizeof(char); + dataDesc[2].Size = sizeof(long); + dataDesc[2].DataPointer = (IntPtr)(&duration); + + fixed (char* pcSt = st) + { + dataDesc[1].DataPointer = (IntPtr)(pcSt); + + WriteEventCore(eventId, parameters, dataDesc); + } + } + } + + [NonEvent] + public unsafe void WriteEvent(int eventId, Guid u) + { + if (IsEnabled()) + { + EventData dataDesc = new EventData(); // this is a struct so no allocation here + + dataDesc.DataPointer = (IntPtr)(&u); + dataDesc.Size = sizeof(Guid); + WriteEventCore(eventId, 1, &dataDesc); + } + } + + [NonEvent] + public unsafe void WriteEvent(int eventId, Guid u, long duration) + { + if (IsEnabled()) + { + const int parameters = 2; + EventData* dataDesc = stackalloc EventData[parameters]; + + dataDesc[0].DataPointer = (IntPtr)(&u); + dataDesc[0].Size = sizeof(Guid); + dataDesc[1].DataPointer = (IntPtr)(&duration); + dataDesc[1].Size = sizeof(long); + + WriteEventCore(eventId, parameters, dataDesc); + } + } + + [NonEvent] + public unsafe void WriteEvent(int eventId, Guid u1, string st1, DateTime dt1, DateTime dt2, Guid u2) // Guid uniqueIdentifier, string name, string validFrom, string validTo, Guid contextId + { + if (IsEnabled()) + { + st1 = st1 ?? String.Empty; + long ft1 = dt1.ToFileTimeUtc(); + long ft2 = dt2.ToFileTimeUtc(); + + const int parameters = 5; + + EventData* dataDesc = stackalloc EventData[parameters]; + dataDesc[0].DataPointer = (IntPtr)(&u1); + dataDesc[0].Size = sizeof(Guid); + dataDesc[1].Size = (st1.Length + 1) * sizeof(char); + dataDesc[2].DataPointer = (IntPtr)(&ft1); + dataDesc[2].Size = sizeof(long); + dataDesc[3].DataPointer = (IntPtr)(&ft2); + dataDesc[3].Size = sizeof(long); + dataDesc[4].DataPointer = (IntPtr)(&u2); + dataDesc[4].Size = sizeof(Guid); + + fixed (char* pcst1 = st1) + { + dataDesc[1].DataPointer = (IntPtr)(pcst1); + + WriteEventCore(eventId, parameters, dataDesc); + } + } + } + + [NonEvent] + public unsafe void WriteEvent(int eventId, Guid u1, string st1, string st2, string st3, Guid u2, long duration) // Guid uniqueIdentifier, string name, string validFrom, string validTo, Guid contextId + { + if (IsEnabled()) + { + st1 = st1 ?? String.Empty; + st2 = st2 ?? String.Empty; + st3 = st3 ?? String.Empty; + + const int parameters = 6; + + EventData* dataDesc = stackalloc EventData[parameters]; + dataDesc[0].DataPointer = (IntPtr)(&u1); + dataDesc[0].Size = sizeof(Guid); + dataDesc[1].Size = (st1.Length + 1) * sizeof(char); + dataDesc[2].Size = (st2.Length + 1) * sizeof(char); + dataDesc[3].Size = (st3.Length + 1) * sizeof(char); + dataDesc[4].DataPointer = (IntPtr)(&u2); + dataDesc[4].Size = sizeof(Guid); + dataDesc[5].DataPointer = (IntPtr)(&duration); + dataDesc[5].Size = sizeof(long); + + fixed (char* pcst1 = st1, pcst2 = st2, pcst3 = st3) + { + dataDesc[1].DataPointer = (IntPtr)(pcst1); + dataDesc[2].DataPointer = (IntPtr)(pcst2); + dataDesc[3].DataPointer = (IntPtr)(pcst3); + + WriteEventCore(eventId, parameters, dataDesc); + } + } + } + + [NonEvent] + public unsafe void WriteEvent(int eventId, Guid uniqueIdentifier, string st1, string st2, string st3) + { + if (IsEnabled()) + { + st1 = st1 ?? String.Empty; + st2 = st2 ?? String.Empty; + st3 = st3 ?? 
String.Empty; + + const int parameters = 4; + EventData* dataDesc = stackalloc EventData[parameters]; + + dataDesc[0].DataPointer = (IntPtr)(&uniqueIdentifier); + dataDesc[0].Size = sizeof(Guid); + dataDesc[1].Size = (st1.Length + 1) * sizeof(char); + dataDesc[2].Size = (st2.Length + 1) * sizeof(char); + dataDesc[3].Size = (st3.Length + 1) * sizeof(char); + + fixed (char* pcst1 = st1, pcst2 = st2, pcst3 = st3) + { + dataDesc[1].DataPointer = (IntPtr)(pcst1); + dataDesc[2].DataPointer = (IntPtr)(pcst2); + dataDesc[3].DataPointer = (IntPtr)(pcst3); + + WriteEventCore(eventId, parameters, dataDesc); + } + } + } + + [NonEvent] + public unsafe void WriteEvent(int eventId, Guid uniqueIdentifier, string st1, string st2, string st3, long duration) + { + if (IsEnabled()) + { + st1 = st1 ?? String.Empty; + st2 = st2 ?? String.Empty; + st3 = st3 ?? String.Empty; + + const int parameters = 5; + + EventData* dataDesc = stackalloc EventData[parameters]; + + dataDesc[0].DataPointer = (IntPtr)(&uniqueIdentifier); + dataDesc[0].Size = sizeof(Guid); + + dataDesc[1].Size = (st1.Length + 1) * sizeof(char); + dataDesc[2].Size = (st2.Length + 1) * sizeof(char); + dataDesc[3].Size = (st3.Length + 1) * sizeof(char); + + dataDesc[4].DataPointer = (IntPtr)(&duration); + dataDesc[4].Size = sizeof(long); + + fixed (char* pcst1 = st1, pcst2 = st2, pcst3 = st3) + { + dataDesc[1].DataPointer = (IntPtr)(pcst1); + dataDesc[2].DataPointer = (IntPtr)(pcst2); + dataDesc[3].DataPointer = (IntPtr)(pcst3); + + WriteEventCore(eventId, parameters, dataDesc); + } + } + } + + [NonEvent] + public unsafe void WriteEvent(int eventId, Guid uniqueIdentifier, string st1, string st2) + { + if (IsEnabled()) + { + st1 = st1 ?? String.Empty; + st2 = st2 ?? String.Empty; + + const int parameters = 3; + EventData* dataDesc = stackalloc EventData[parameters]; + + dataDesc[0].DataPointer = (IntPtr)(&uniqueIdentifier); + dataDesc[0].Size = sizeof(Guid); + dataDesc[1].Size = (st1.Length + 1) * sizeof(char); + dataDesc[2].Size = (st2.Length + 1) * sizeof(char); + + fixed (char* pcst1 = st1, pcst2 = st2) + { + dataDesc[1].DataPointer = (IntPtr)(pcst1); + dataDesc[2].DataPointer = (IntPtr)(pcst2); + + WriteEventCore(eventId, parameters, dataDesc); + } + } + } + + [NonEvent] + public unsafe void WriteEvent(int eventId, Guid uniqueIdentifier, string st1, string st2, long duration) + { + if (IsEnabled()) + { + st1 = st1 ?? String.Empty; + st2 = st2 ?? String.Empty; + + const int parameters = 4; + + EventData* dataDesc = stackalloc EventData[parameters]; + + dataDesc[0].DataPointer = (IntPtr)(&uniqueIdentifier); + dataDesc[0].Size = sizeof(Guid); + dataDesc[1].Size = (st1.Length + 1) * sizeof(char); + dataDesc[2].Size = (st2.Length + 1) * sizeof(char); + dataDesc[3].DataPointer = (IntPtr)(&duration); + dataDesc[3].Size = sizeof(long); + + fixed (char* pcst1 = st1, pcst2 = st2) + { + dataDesc[1].DataPointer = (IntPtr)(pcst1); + dataDesc[2].DataPointer = (IntPtr)(pcst2); + + WriteEventCore(eventId, parameters, dataDesc); + } + } + } + + [NonEvent] + public unsafe void WriteEvent(int eventId, string st, int i1, long duration) + { + if (IsEnabled()) + { + const int parameters = 3; + st = st ?? 
String.Empty; + + EventData* dataDesc = stackalloc EventData[parameters]; + + dataDesc[0].Size = (st.Length + 1) * sizeof(char); + dataDesc[1].DataPointer = (IntPtr)(&i1); + dataDesc[1].Size = sizeof(Int32); + dataDesc[2].DataPointer = (IntPtr)(&duration); + dataDesc[2].Size = sizeof(int); + + fixed (char* pcst = st) + { + dataDesc[0].DataPointer = (IntPtr)(pcst); + + WriteEventCore(eventId, parameters, dataDesc); + } + } + } + #endregion + + public void MethodStart(Guid uniqueIdentifier, Guid hostId, string methodName) + { + WriteEvent(1, uniqueIdentifier, hostId, methodName); + } + + public void MethodStop(Guid uniqueIdentifier, Guid hostId, string methodName, long duration) + { + WriteEvent(2, uniqueIdentifier, hostId, methodName, duration); + } + + public void NotificationCallbackStart(Guid hostId, string callback) + { + WriteEvent(3, hostId, callback); + } + + public void NotificationCallbackStop(Guid hostId, string callback, long duration) + { + WriteEvent(4, hostId, callback, duration); + } + + public void TaskCallbackStart(Guid hostId, string callback) + { + WriteEvent(5, hostId, callback); + } + + public void TaskCallbackStop(Guid hostId, string callback, long duration) + { + WriteEvent(6, hostId, callback, duration); + } + + public void StopHostTaskStart(Guid hostId) + { + WriteEvent(7, hostId); + } + + public void StopHostTaskStop(Guid hostId, long duration) + { + WriteEvent(8, hostId, duration); + } + + public void RefreshSecurityTokenStart(Guid uniqueIdentifier, string name) + { + WriteEvent(9, uniqueIdentifier, name); + } + + public void RefreshSecurityTokenStop(Guid uniqueIdentifier, string name, DateTime validFrom, DateTime validTo, Guid contextId, long duration) + { + WriteEvent(10, uniqueIdentifier, name, validFrom, validTo, contextId, duration); + } + + public void SQLStart(Guid uniqueIdentifier, string query, string server, string databaseName) + { + WriteEvent(11, uniqueIdentifier, query, server, databaseName); + } + + public void SQLStop(Guid uniqueIdentifier, string query, string server, string databaseName, long duration) + { + WriteEvent(12, uniqueIdentifier, query, server, databaseName, duration); + } + + public void RESTStart(Guid uniqueIdentifier, string message) + { + WriteEvent(13, uniqueIdentifier, message); + } + + public void RESTStop(Guid uniqueIdentifier, Guid originalActivityId, string message, long duration) + { + WriteEvent(14, uniqueIdentifier, originalActivityId, message, duration); + } + + public void WindowsAzureStorageStart(Guid uniqueIdentifier, string accountName, string methodName) + { + WriteEvent(15, uniqueIdentifier, accountName, methodName); + } + + public void WindowsAzureStorageStop(Guid uniqueIdentifier, string accountName, string methodName, long duration) + { + WriteEvent(16, uniqueIdentifier, accountName, methodName, duration); + } + + public void LoadHostStart(Guid hostId) + { + WriteEvent(17, hostId); + } + + public void LoadHostStop(Guid hostId, long duration) + { + WriteEvent(18, hostId, duration); + } + + /// + /// This method is intentionally called Begin, not Start(), since it's a recursive event + /// Service Profiler cannot deal with recursive events unless you have the + /// [Event(EventActivityOptions.Recursive)] however that is not supported in 4.5 currently + /// + /// + /// + /// + public void CreateServiceInstanceBegin(Guid uniqueIdentifier, Guid hostId, string serviceType) + { + WriteEvent(19, uniqueIdentifier, hostId, serviceType); + } + + /// + /// This method is intentionally called Begin, not Start(), since it's a 
recursive event + /// Service Profiler cannot deal with recursive events unless you have the + /// [Event(EventActivityOptions.Recursive)] however that is not supported in 4.5 currently + /// + /// + /// + /// + /// + public void CreateServiceInstanceEnd(Guid uniqueIdentifier, Guid hostId, string serviceType, long duration) + { + WriteEvent(20, uniqueIdentifier, hostId, serviceType, duration); + } + + public void DetectedLockReentryViolation(string lockName) + { + WriteEvent(21, lockName); + } + + public void DetectedLockUsageViolation(string lockName, string locksHeld) + { + WriteEvent(22, lockName, locksHeld); + } + + public void RedisStart(Guid uniqueIdentifier, string operation, string ciArea, string cacheArea) + { + WriteEvent(23, uniqueIdentifier, operation, ciArea, cacheArea); + } + + public void RedisStop(Guid uniqueIdentifier, string operation, string ciArea, string cacheArea, long duration) + { + WriteEvent(24, uniqueIdentifier, operation, ciArea, cacheArea, duration); + } + + public void MessageBusSendBatchStart(Guid uniqueIdentifier, string messageBusName, int numberOfMessages) + { + WriteEvent(25, uniqueIdentifier, messageBusName, numberOfMessages); + } + + public void MessageBusSendBatchStop(Guid uniqueIdentifier, string messageBusName, int numberOfMessages, long duration) + { + WriteEvent(26, uniqueIdentifier, messageBusName, numberOfMessages, duration); + } + + } +} diff --git a/src/Sdk/Common/EmbeddedVersionInfo.cs b/src/Sdk/Common/EmbeddedVersionInfo.cs new file mode 100644 index 00000000000..ee036356736 --- /dev/null +++ b/src/Sdk/Common/EmbeddedVersionInfo.cs @@ -0,0 +1,39 @@ +using System; +using System.Runtime.CompilerServices; + +namespace GitHub.Services.Common +{ + [CompilerGenerated] + internal static class GeneratedVersionInfo + { + // Legacy values which preserve semantics from prior to the Assembly / File version split. + // See Toolsets\Version\Version.props for more details. + public const String MajorVersion = "16"; + public const String MinorVersion = "0"; + public const String BuildVersion = "65000"; + public const String PatchVersion = "0"; + public const String ProductVersion = MajorVersion + "." + MinorVersion; + + // Assembly version (i.e. strong name) + public const String AssemblyMajorVersion = "16"; + public const String AssemblyMinorVersion = "0"; + public const String AssemblyBuildVersion = "0"; + public const String AssemblyPatchVersion = "0"; + public const String AssemblyVersion = AssemblyMajorVersion + "." + AssemblyMinorVersion + "." + AssemblyBuildVersion + "." + AssemblyPatchVersion; + + // File version + public const String FileMajorVersion = "16"; + public const String FileMinorVersion = "255"; + public const String FileBuildVersion = "65000"; + public const String FilePatchVersion = "0"; + public const String FileVersion = FileMajorVersion + "." + FileMinorVersion + "." + FileBuildVersion + "." + FilePatchVersion; + + // Derived versions + public const String TfsMajorVersion = "8"; + public const String TfsMinorVersion = "0"; + public const String TfsProductVersion = TfsMajorVersion + "." 
+ TfsMinorVersion; + + // On-premises TFS install folder + public const String TfsInstallDirectory = "Azure DevOps Server 2019"; + } +} diff --git a/src/Sdk/CoreWebApi/Core/ProjectClasses.cs b/src/Sdk/CoreWebApi/Core/ProjectClasses.cs new file mode 100644 index 00000000000..7b6de85160c --- /dev/null +++ b/src/Sdk/CoreWebApi/Core/ProjectClasses.cs @@ -0,0 +1,80 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.Common.Internal; +using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Patch; +using GitHub.Services.WebApi.Patch.Json; + +namespace GitHub.Core.WebApi +{ + [GenerateAllConstants] + public enum ProjectState + { + /// + /// Project is in the process of being deleted. + /// + [EnumMember] + Deleting = 2, + + /// + /// Project is in the process of being created. + /// + [EnumMember] + New = 0, + + /// + /// Project is completely created and ready to use. + /// + [EnumMember] + WellFormed = 1, + + /// + /// Project has been queued for creation, but the process has not yet started. + /// + [EnumMember] + CreatePending = 3, + + /// + /// All projects regardless of state. + /// + [EnumMember] + All = -1, // Used for filtering. + + /// + /// Project has not been changed. + /// + [EnumMember] + Unchanged = -2, // Used for updating projects. + + /// + /// Project has been deleted. + /// + [EnumMember] + Deleted = 4, // Used for the project history. + } + + public enum ProjectVisibility // Stored as a TINYINT + { + [ClientInternalUseOnly] + Unchanged = -1, // Used for updating projects. + /// + /// The project is only visible to users with explicit access. + /// + Private = 0, + /// + /// Enterprise level project visibility + /// + [ClientInternalUseOnly(omitFromTypeScriptDeclareFile: false)] + Organization = 1, + /// + /// The project is visible to all. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + Public = 2, + [ClientInternalUseOnly] + SystemPrivate = 3 // Soft-deleted projects + } +} diff --git a/src/Sdk/CoreWebApi/Core/TeamProjectReference.cs b/src/Sdk/CoreWebApi/Core/TeamProjectReference.cs new file mode 100644 index 00000000000..a46e880f7ef --- /dev/null +++ b/src/Sdk/CoreWebApi/Core/TeamProjectReference.cs @@ -0,0 +1,105 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Core.WebApi +{ + /// + /// Represents a shallow reference to a TeamProject. + /// + [DataContract] + public class TeamProjectReference : ISecuredObject + { + /// + /// Default constructor to ensure we set up the project state correctly for serialization. + /// + public TeamProjectReference() + { + State = ProjectState.Unchanged; + Visibility = ProjectVisibility.Unchanged; + } + + /// + /// Project identifier. + /// + [DataMember(Order = 0, EmitDefaultValue = false)] + public Guid Id { get; set; } + + /// + /// Project abbreviation. + /// + [DataMember(Order = 1, EmitDefaultValue = false)] + public string Abbreviation { get; set; } + + /// + /// Project name. + /// + [DataMember(Order = 2, EmitDefaultValue = false)] + public string Name { get; set; } + + /// + /// The project's description (if any). + /// + [DataMember(Order = 3, EmitDefaultValue = false)] + public string Description { get; set; } + + /// + /// Url to the full version of the object. + /// + [DataMember(Order = 4, EmitDefaultValue = false)] + public string Url { get; set; } + + /// + /// Project state. 
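To illustrate the default-constructor comment above: a freshly created reference carries the Unchanged sentinels, so a partial update does not accidentally reset state or visibility. A small sketch (the id and name are placeholders):

using System;
using GitHub.Core.WebApi;

var project = new TeamProjectReference
{
    Id = Guid.NewGuid(),        // placeholder identifier
    Name = "sample-project"     // placeholder name
};

// Set by the default constructor so serialization round-trips as "no change".
Console.WriteLine(project.State);        // Unchanged
Console.WriteLine(project.Visibility);   // Unchanged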
+ /// + [DataMember(Order = 5)] + public ProjectState State { get; set; } + + /// + /// Project revision. + /// + [DataMember(Order = 6, EmitDefaultValue = false)] + public Int64 Revision { get; set; } + + /// + /// Project visibility. + /// + [DataMember(Order = 7)] + public ProjectVisibility Visibility { get; set; } + + /// + /// Url to default team identity image. + /// + [DataMember(Order = 8, EmitDefaultValue = false)] + public String DefaultTeamImageUrl { get; set; } + + /// + /// Project last update time. + /// + [DataMember(Order = 9)] + public DateTime LastUpdateTime { get; set; } + + #region ISecuredObject + Guid ISecuredObject.NamespaceId => NamespaceId; + + int ISecuredObject.RequiredPermissions => RequiredPermissions; + + string ISecuredObject.GetToken() + { + return GetToken(); + } + + protected virtual Guid NamespaceId => TeamProjectSecurityConstants.NamespaceId; + + protected virtual int RequiredPermissions => TeamProjectSecurityConstants.GenericRead; + + protected virtual string GetToken() + { + // WE DON'T CARE THIS FOR NOW + return TeamProjectSecurityConstants.GetToken(Id.ToString("D")); + } + + #endregion + } +} diff --git a/src/Sdk/DTContracts/Contracts/AuthorizationHeader.cs b/src/Sdk/DTContracts/Contracts/AuthorizationHeader.cs new file mode 100644 index 00000000000..088d919a57d --- /dev/null +++ b/src/Sdk/DTContracts/Contracts/AuthorizationHeader.cs @@ -0,0 +1,16 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.Common.Contracts +{ + [DataContract] + public class AuthorizationHeader : BaseSecuredObject + { + [DataMember(EmitDefaultValue = false)] + public String Name { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String Value { get; set; } + } +} diff --git a/src/Sdk/DTContracts/Contracts/DataSourceBinding.cs b/src/Sdk/DTContracts/Contracts/DataSourceBinding.cs new file mode 100644 index 00000000000..b8e55c285f9 --- /dev/null +++ b/src/Sdk/DTContracts/Contracts/DataSourceBinding.cs @@ -0,0 +1,149 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.Common.Contracts +{ + /// + /// Represents binding of data source for the service endpoint request. 
+ /// + [DataContract] + public class DataSourceBindingBase : BaseSecuredObject + { + public DataSourceBindingBase() + { + } + + protected DataSourceBindingBase(DataSourceBindingBase inputDefinitionToClone) + : this(inputDefinitionToClone, null) + { + } + + protected DataSourceBindingBase(DataSourceBindingBase inputDefinitionToClone, ISecuredObject securedObject) + : base(securedObject) + { + this.DataSourceName = inputDefinitionToClone.DataSourceName; + this.EndpointId = inputDefinitionToClone.EndpointId; + this.Target = inputDefinitionToClone.Target; + this.ResultTemplate = inputDefinitionToClone.ResultTemplate; + this.EndpointUrl = inputDefinitionToClone.EndpointUrl; + this.ResultSelector = inputDefinitionToClone.ResultSelector; + this.RequestVerb = inputDefinitionToClone.RequestVerb; + this.RequestContent = inputDefinitionToClone.RequestContent; + this.CallbackContextTemplate = inputDefinitionToClone.CallbackContextTemplate; + this.CallbackRequiredTemplate = inputDefinitionToClone.CallbackRequiredTemplate; + this.InitialContextTemplate = inputDefinitionToClone.InitialContextTemplate; + inputDefinitionToClone.Parameters.Copy(this.Parameters); + this.CloneHeaders(inputDefinitionToClone.Headers); + } + + /// + /// Gets or sets the name of the data source. + /// + [DataMember(EmitDefaultValue = false)] + public string DataSourceName { get; set; } + + /// + /// Gets or sets the parameters for the data source. + /// + [DataMember(EmitDefaultValue = false)] + public Dictionary Parameters + { + get + { + if (m_parameters == null) + { + m_parameters = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_parameters; + } + } + + public DataSourceBindingBase Clone(ISecuredObject securedObject) + { + return new DataSourceBindingBase(this, securedObject); + } + + private void CloneHeaders(List headers) + { + if (headers == null) + { + return; + } + + this.Headers = headers.Select(header => new AuthorizationHeader { Name = header.Name, Value = header.Value }).ToList(); + } + + /// + /// Gets or sets the endpoint Id. + /// + [DataMember(EmitDefaultValue = false)] + public String EndpointId { get; set; } + + /// + /// Gets or sets the target of the data source. + /// + [DataMember(EmitDefaultValue = false)] + public String Target { get; set; } + + /// + /// Gets or sets the result template. + /// + [DataMember(EmitDefaultValue = false)] + public String ResultTemplate { get; set; } + + /// + /// Gets or sets http request verb + /// + [DataMember(EmitDefaultValue = false)] + public String RequestVerb { get; set; } + + /// + /// Gets or sets http request body + /// + [DataMember(EmitDefaultValue = false)] + public String RequestContent { get; set; } + + /// + /// Gets or sets the url of the service endpoint. + /// + [DataMember(EmitDefaultValue = false)] + public String EndpointUrl { get; set; } + + /// + /// Gets or sets the result selector. + /// + [DataMember(EmitDefaultValue = false)] + public String ResultSelector { get; set; } + + /// + /// Pagination format supported by this data source(ContinuationToken/SkipTop). + /// + [DataMember(EmitDefaultValue = false)] + public String CallbackContextTemplate { get; set; } + + /// + /// Subsequent calls needed? + /// + [DataMember(EmitDefaultValue = false)] + public String CallbackRequiredTemplate { get; set; } + + /// + /// Defines the initial value of the query params + /// + [DataMember(EmitDefaultValue = false)] + public String InitialContextTemplate { get; set; } + + /// + /// Gets or sets the authorization headers. 
+ /// + [DataMember(EmitDefaultValue = false)] + public List Headers { get; set; } + + private Dictionary m_parameters; + } +} diff --git a/src/Sdk/DTContracts/Contracts/ProcessParameters.cs b/src/Sdk/DTContracts/Contracts/ProcessParameters.cs new file mode 100644 index 00000000000..7e7798701b8 --- /dev/null +++ b/src/Sdk/DTContracts/Contracts/ProcessParameters.cs @@ -0,0 +1,163 @@ +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.Common.Contracts +{ + [DataContract] + public class ProcessParameters : BaseSecuredObject + { + public ProcessParameters() + : this(null) + { + } + + public ProcessParameters(ISecuredObject securedObject) + : this(null, securedObject) + { + } + + private ProcessParameters(ProcessParameters toClone, ISecuredObject securedObject) + : base(securedObject) + { + if (toClone != null) + { + if (toClone.Inputs.Count > 0) + { + Inputs.AddRange(toClone.Inputs.Select(i => i.Clone(securedObject))); + } + + if (toClone.SourceDefinitions.Count > 0) + { + SourceDefinitions.AddRange(toClone.SourceDefinitions.Select(sd => sd.Clone(securedObject))); + } + + if (toClone.DataSourceBindings.Count > 0) + { + DataSourceBindings.AddRange(toClone.DataSourceBindings.Select(dsb => dsb.Clone(securedObject))); + } + } + } + + public IList Inputs + { + get + { + if (m_inputs == null) + { + m_inputs = new List(); + } + return m_inputs; + } + } + + public IList SourceDefinitions + { + get + { + if (m_sourceDefinitions == null) + { + m_sourceDefinitions = new List(); + } + return m_sourceDefinitions; + } + } + + public IList DataSourceBindings + { + get + { + if (m_dataSourceBindings == null) + { + m_dataSourceBindings = new List(); + } + return m_dataSourceBindings; + } + } + + public override int GetHashCode() + { + return base.GetHashCode(); + } + + public override bool Equals(object obj) + { + var processParameters2 = obj as ProcessParameters; + if (processParameters2 == null) + { + return false; + } + + if (this.Inputs == null && processParameters2.Inputs == null) + { + return true; + } + + if ((this.Inputs != null && processParameters2.Inputs == null) + || (this.Inputs == null && processParameters2.Inputs != null)) + { + return false; + } + + if (this.Inputs.Count != processParameters2.Inputs.Count) + { + return false; + } + + var orderedProcessParameters1 = this.Inputs.Where(i => i != null).OrderBy(i => i.Name); + var orderedProcessParameters2 = processParameters2.Inputs.Where(i => i != null).OrderBy(i => i.Name); + + if (!orderedProcessParameters1.OrderBy(i => i.Name).SequenceEqual(orderedProcessParameters2)) + { + return false; + } + + return true; + } + + public ProcessParameters Clone(ISecuredObject securedObject = null) + { + return new ProcessParameters(this, securedObject); + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedInputs, ref m_inputs, true); + SerializationHelper.Copy(ref m_serializedSourceDefinitions, ref m_sourceDefinitions, true); + SerializationHelper.Copy(ref m_serializedDataSourceBindings, ref m_dataSourceBindings, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_inputs, ref m_serializedInputs); + SerializationHelper.Copy(ref m_sourceDefinitions, ref m_serializedSourceDefinitions); + SerializationHelper.Copy(ref m_dataSourceBindings, ref 
m_serializedDataSourceBindings); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedInputs = null; + m_serializedSourceDefinitions = null; + m_serializedDataSourceBindings = null; + } + + [DataMember(Name = "Inputs", EmitDefaultValue = false)] + private List m_serializedInputs; + + [DataMember(Name = "SourceDefinitions", EmitDefaultValue = false)] + private List m_serializedSourceDefinitions; + + [DataMember(Name = "DataSourceBindings", EmitDefaultValue = false)] + private List m_serializedDataSourceBindings; + + private List m_inputs; + private List m_sourceDefinitions; + private List m_dataSourceBindings; + } +} diff --git a/src/Sdk/DTContracts/Contracts/TaskInputDefinition.cs b/src/Sdk/DTContracts/Contracts/TaskInputDefinition.cs new file mode 100644 index 00000000000..49e5091b615 --- /dev/null +++ b/src/Sdk/DTContracts/Contracts/TaskInputDefinition.cs @@ -0,0 +1,254 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.Common.Contracts +{ + [DataContract] + public class TaskInputDefinitionBase : BaseSecuredObject + { + public TaskInputDefinitionBase() + { + InputType = TaskInputType.String; + DefaultValue = String.Empty; + Required = false; + HelpMarkDown = String.Empty; + } + + protected TaskInputDefinitionBase(TaskInputDefinitionBase inputDefinitionToClone) + : this(inputDefinitionToClone, null) + { + } + + protected TaskInputDefinitionBase(TaskInputDefinitionBase inputDefinitionToClone, ISecuredObject securedObject) + : base(securedObject) + { + this.DefaultValue = inputDefinitionToClone.DefaultValue; + this.InputType = inputDefinitionToClone.InputType; + this.Label = inputDefinitionToClone.Label; + this.Name = inputDefinitionToClone.Name; + this.Required = inputDefinitionToClone.Required; + this.HelpMarkDown = inputDefinitionToClone.HelpMarkDown; + this.VisibleRule = inputDefinitionToClone.VisibleRule; + this.GroupName = inputDefinitionToClone.GroupName; + + if (inputDefinitionToClone.Validation != null) + { + this.Validation = inputDefinitionToClone.Validation.Clone(securedObject); + } + + if (inputDefinitionToClone.m_aliases != null) + { + this.m_aliases = new List(inputDefinitionToClone.m_aliases); + } + + if (inputDefinitionToClone.m_options != null) + { + this.m_options = new Dictionary(inputDefinitionToClone.m_options); + } + if (inputDefinitionToClone.m_properties != null) + { + this.m_properties = new Dictionary(inputDefinitionToClone.m_properties); + } + } + + public IList Aliases + { + get + { + if (m_aliases == null) + { + m_aliases = new List(); + } + return m_aliases; + } + } + + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Label + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String DefaultValue + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Boolean Required + { + get; + set; + } + + [DataMember(Name = "Type")] + public String InputType + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String HelpMarkDown + { + get; + set; + } + + // VisibleRule should specify the condition at which this input is to be shown/displayed + // Typical format is "NAME OF THE DEPENDENT INPUT = VALUE TOBE BOUND" + [DataMember(EmitDefaultValue = false)] + public string VisibleRule + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public string GroupName + { + 
get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TaskInputValidation Validation + { + get; + set; + } + + public Dictionary Options + { + get + { + if (m_options == null) + { + m_options = new Dictionary(); + } + return m_options; + } + } + + public Dictionary Properties + { + get + { + if (m_properties == null) + { + m_properties = new Dictionary(); + } + return m_properties; + } + } + + public virtual TaskInputDefinitionBase Clone( + ISecuredObject securedObject) + { + return new TaskInputDefinitionBase(this, securedObject); + } + + public override int GetHashCode() + { + return this.Name.GetHashCode() ^ this.DefaultValue.GetHashCode() ^ this.Label.GetHashCode(); + } + + public override bool Equals(object obj) + { + var taskInput2 = obj as TaskInputDefinitionBase; + if (taskInput2 == null + || !string.Equals(InputType, taskInput2.InputType, StringComparison.OrdinalIgnoreCase) + || !string.Equals(Label, taskInput2.Label, StringComparison.OrdinalIgnoreCase) + || !string.Equals(Name, taskInput2.Name, StringComparison.OrdinalIgnoreCase) + || !string.Equals(GroupName, taskInput2.GroupName, StringComparison.OrdinalIgnoreCase) + || !string.Equals(DefaultValue, taskInput2.DefaultValue, StringComparison.OrdinalIgnoreCase) + || !string.Equals(HelpMarkDown, taskInput2.HelpMarkDown, StringComparison.OrdinalIgnoreCase) + || !string.Equals(VisibleRule, taskInput2.VisibleRule, StringComparison.OrdinalIgnoreCase) + || !this.Required.Equals(taskInput2.Required)) + { + return false; + } + + if (!AreListsEqual(Aliases, taskInput2.Aliases) + || !AreDictionariesEqual(Properties, taskInput2.Properties) + || !AreDictionariesEqual(Options, taskInput2.Options)) + { + return false; + } + + if ((Validation != null && taskInput2.Validation == null) + || (Validation == null && taskInput2.Validation != null) + || ((Validation != null && taskInput2.Validation != null) + && !Validation.Equals(taskInput2.Validation))) + { + return false; + } + + return true; + } + + private bool AreDictionariesEqual(Dictionary input1, Dictionary input2) + { + if (input1 == null && input2 == null) + { + return true; + } + + if ((input1 == null && input2 != null) + || (input1 != null && input2 == null) + || (input1.Count != input2.Count)) + { + return false; + } + + foreach (var key in input1.Keys) + { + if (!(input2.ContainsKey(key) && String.Equals(input1[key], input2[key], StringComparison.OrdinalIgnoreCase))) + { + return false; + } + } + + return true; + } + + private Boolean AreListsEqual(IList list1, IList list2) + { + if (list1.Count != list2.Count) + { + return false; + } + + for (Int32 i = 0; i < list1.Count; i++) + { + if (!String.Equals(list1[i], list2[i], StringComparison.OrdinalIgnoreCase)) + { + return false; + } + } + + return true; + } + + [DataMember(Name = "Aliases", EmitDefaultValue = false)] + private List m_aliases; + + [DataMember(Name = "Options", EmitDefaultValue = false)] + private Dictionary m_options; + + [DataMember(Name = "Properties", EmitDefaultValue = false)] + private Dictionary m_properties; + } +} diff --git a/src/Sdk/DTContracts/Contracts/TaskInputType.cs b/src/Sdk/DTContracts/Contracts/TaskInputType.cs new file mode 100644 index 00000000000..44f73df39de --- /dev/null +++ b/src/Sdk/DTContracts/Contracts/TaskInputType.cs @@ -0,0 +1,14 @@ +using System.Runtime.Serialization; +using System; + +namespace GitHub.DistributedTask.Common.Contracts +{ + public static class TaskInputType + { + public const String String = "string"; + public const String Repository = "repository"; + public 
const String Boolean = "boolean"; + public const String KeyValue = "keyvalue"; + public const String FilePath = "filepath"; + } +} diff --git a/src/Sdk/DTContracts/Contracts/TaskInputValidation.cs b/src/Sdk/DTContracts/Contracts/TaskInputValidation.cs new file mode 100644 index 00000000000..839e7bf029d --- /dev/null +++ b/src/Sdk/DTContracts/Contracts/TaskInputValidation.cs @@ -0,0 +1,59 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.Common.Contracts +{ + [DataContract] + public class TaskInputValidation : BaseSecuredObject + { + public TaskInputValidation() + { + } + + private TaskInputValidation(TaskInputValidation toClone, ISecuredObject securedObject) + : base(securedObject) + { + if (toClone != null) + { + this.Expression = toClone.Expression; + this.Message = toClone.Message; + } + } + + /// + /// Conditional expression + /// + [DataMember(EmitDefaultValue = false)] + public String Expression + { + get; + set; + } + + /// + /// Message explaining how user can correct if validation fails + /// + [DataMember(EmitDefaultValue = false)] + public String Message + { + get; + set; + } + + public override int GetHashCode() + { + return Expression.GetHashCode() ^ Message.GetHashCode(); + } + + public TaskInputValidation Clone() + { + return this.Clone(null); + } + + public TaskInputValidation Clone(ISecuredObject securedObject) + { + return new TaskInputValidation(this, securedObject); + } + } +} diff --git a/src/Sdk/DTContracts/Contracts/TaskSourceDefinition.cs b/src/Sdk/DTContracts/Contracts/TaskSourceDefinition.cs new file mode 100644 index 00000000000..20407f51dc3 --- /dev/null +++ b/src/Sdk/DTContracts/Contracts/TaskSourceDefinition.cs @@ -0,0 +1,74 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.Common.Contracts +{ + [DataContract] + public class TaskSourceDefinitionBase : BaseSecuredObject + { + public TaskSourceDefinitionBase() + { + AuthKey = String.Empty; + Endpoint = String.Empty; + Selector = String.Empty; + Target = String.Empty; + KeySelector = String.Empty; + } + + protected TaskSourceDefinitionBase(TaskSourceDefinitionBase inputDefinitionToClone) + : this(inputDefinitionToClone, null) + { + } + + protected TaskSourceDefinitionBase(TaskSourceDefinitionBase inputDefinitionToClone, ISecuredObject securedObject) + : base(securedObject) + { + this.Endpoint = inputDefinitionToClone.Endpoint; + this.Target = inputDefinitionToClone.Target; + this.AuthKey = inputDefinitionToClone.AuthKey; + this.Selector = inputDefinitionToClone.Selector; + this.KeySelector = inputDefinitionToClone.KeySelector; + } + + public virtual TaskSourceDefinitionBase Clone(ISecuredObject securedObject) + { + return new TaskSourceDefinitionBase(this, securedObject); + } + + [DataMember(EmitDefaultValue = false)] + public String Endpoint + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Target + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String AuthKey + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Selector + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String KeySelector + { + get; + set; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/AndNode.cs b/src/Sdk/DTExpressions/Expressions/AndNode.cs new file mode 100644 index 00000000000..47411f8c759 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/AndNode.cs @@ -0,0 +1,22 @@ +using 
System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class AndNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + foreach (ExpressionNode parameter in Parameters) + { + if (!parameter.EvaluateBoolean(context)) + { + return false; + } + } + + return true; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/CoalesceNode.cs b/src/Sdk/DTExpressions/Expressions/CoalesceNode.cs new file mode 100644 index 00000000000..87ff6ff6b82 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/CoalesceNode.cs @@ -0,0 +1,31 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class CoalesceNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + EvaluationResult result = null; + foreach (ExpressionNode parameter in Parameters) + { + result = parameter.Evaluate(context); + if (result.Kind == ValueKind.Null) + { + continue; + } + + if (result.Kind == ValueKind.String && String.IsNullOrEmpty(result.Value as String)) + { + continue; + } + + break; + } + + return result?.Value; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/CollectionAccessors/JArrayAccessor.cs b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/JArrayAccessor.cs new file mode 100644 index 00000000000..3ee25afa99d --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/JArrayAccessor.cs @@ -0,0 +1,31 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Expressions.CollectionAccessors +{ + internal sealed class JArrayAccessor : IReadOnlyArray + { + public JArrayAccessor(JArray jarray) + { + m_jarray = jarray; + } + + public Int32 Count => m_jarray.Count; + + public Object this[Int32 index] => m_jarray[index]; + + public IEnumerator GetEnumerator() + { + return m_jarray.GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return m_jarray.GetEnumerator(); + } + + private readonly JArray m_jarray; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/CollectionAccessors/JObjectAccessor.cs b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/JObjectAccessor.cs new file mode 100644 index 00000000000..4484812fabc --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/JObjectAccessor.cs @@ -0,0 +1,56 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Expressions.CollectionAccessors +{ + internal sealed class JObjectAccessor : IReadOnlyObject + { + public JObjectAccessor(JObject jobject) + { + m_jobject = jobject; + } + + public Int32 Count => m_jobject.Count; + + public IEnumerable Keys => (m_jobject as IDictionary).Keys; + + // This uses Select. Calling .Values directly throws an exception. 
+ public IEnumerable Values => (m_jobject as IDictionary).Select(x => x.Value); + + public Object this[String key] => m_jobject[key]; + + public Boolean ContainsKey(String key) + { + return (m_jobject as IDictionary).ContainsKey(key); + } + + public IEnumerator> GetEnumerator() + { + return (m_jobject as IDictionary).Select(x => new KeyValuePair(x.Key, x.Value)).GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return (m_jobject as IDictionary).Select(x => new KeyValuePair(x.Key, x.Value)).GetEnumerator(); + } + + public Boolean TryGetValue( + String key, + out Object value) + { + if ((m_jobject as IDictionary).TryGetValue(key, out JToken val)) + { + value = val; + return true; + } + + value = null; + return false; + } + + private readonly JObject m_jobject; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/CollectionAccessors/JsonDictionaryContractAccessor.cs b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/JsonDictionaryContractAccessor.cs new file mode 100644 index 00000000000..9b18219859a --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/JsonDictionaryContractAccessor.cs @@ -0,0 +1,106 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using Newtonsoft.Json.Serialization; + +namespace GitHub.DistributedTask.Expressions.CollectionAccessors +{ + internal sealed class JsonDictionaryContractAccessor : IReadOnlyObject + { + public JsonDictionaryContractAccessor( + JsonDictionaryContract contract, + Object obj) + { + m_contract = contract; + m_obj = obj; + } + + public Int32 Count + { + get + { + var genericMethod = s_getCountTemplate.Value.MakeGenericMethod(m_contract.DictionaryValueType); + return (Int32)genericMethod.Invoke(null, new[] { m_obj }); + } + } + + public IEnumerable Keys + { + get + { + var genericMethod = s_getKeysTemplate.Value.MakeGenericMethod(m_contract.DictionaryValueType); + return genericMethod.Invoke(null, new[] { m_obj }) as IEnumerable; + } + } + + public IEnumerable Values => Keys.Select(x => this[x]); + + public Object this[String key] + { + get + { + if (TryGetValue(key, out Object value)) + { + return value; + } + + throw new KeyNotFoundException(ExpressionResources.KeyNotFound(key)); + } + } + + public Boolean ContainsKey(String key) + { + return TryGetValue(key, out _); + } + + public IEnumerator> GetEnumerator() + { + return Keys.Select(x => new KeyValuePair(x, this[x])).GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return Keys.Select(x => new KeyValuePair(x, this[x])).GetEnumerator(); + } + + public Boolean TryGetValue( + String key, + out Object value) + { + var genericMethod = s_tryGetValueTemplate.Value.MakeGenericMethod(m_contract.DictionaryValueType); + var tuple = genericMethod.Invoke(null, new[] { m_obj, key }) as Tuple; + value = tuple.Item2; + return tuple.Item1; + } + + private static Int32 GetCount(IDictionary dictionary) + { + return dictionary.Count; + } + + private static IEnumerable GetKeys(IDictionary dictionary) + { + return dictionary.Keys; + } + + private static Tuple TryGetValue( + IDictionary dictionary, + String key) + { + if (dictionary.TryGetValue(key, out TValue value)) + { + return new Tuple(true, value); + } + + return new Tuple(false, null); + } + + private static Lazy s_getCountTemplate = new Lazy(() => typeof(JsonDictionaryContractAccessor).GetTypeInfo().GetMethod(nameof(GetCount), BindingFlags.NonPublic | BindingFlags.Static)); + private static Lazy s_getKeysTemplate = new 
Lazy(() => typeof(JsonDictionaryContractAccessor).GetTypeInfo().GetMethod(nameof(GetKeys), BindingFlags.NonPublic | BindingFlags.Static)); + private static Lazy s_tryGetValueTemplate = new Lazy(() => typeof(JsonDictionaryContractAccessor).GetTypeInfo().GetMethod(nameof(TryGetValue), BindingFlags.NonPublic | BindingFlags.Static)); + private readonly JsonDictionaryContract m_contract; + private readonly Object m_obj; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/CollectionAccessors/JsonObjectContractAccessor.cs b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/JsonObjectContractAccessor.cs new file mode 100644 index 00000000000..56928166fe6 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/JsonObjectContractAccessor.cs @@ -0,0 +1,89 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using Newtonsoft.Json.Serialization; + +namespace GitHub.DistributedTask.Expressions.CollectionAccessors +{ + internal sealed class JsonObjectContractAccessor : IReadOnlyObject + { + public JsonObjectContractAccessor( + JsonObjectContract contract, + Object obj) + { + m_contract = contract; + m_obj = obj; + } + + public Int32 Count => GetProperties().Count(); + + public IEnumerable Keys => GetProperties().Select(x => x.PropertyName); + + public IEnumerable Values => GetProperties().Select(x => x.ValueProvider.GetValue(m_obj)); + + public Object this[String key] + { + get + { + if (TryGetValue(key, out Object value)) + { + return value; + } + + throw new KeyNotFoundException(ExpressionResources.KeyNotFound(key)); + } + } + + public Boolean ContainsKey(String key) + { + return TryGetProperty(key, out _); + } + + public IEnumerator> GetEnumerator() + { + return Keys.Select(x => new KeyValuePair(x, this[x])).GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return Keys.Select(x => new KeyValuePair(x, this[x])).GetEnumerator(); + } + + public Boolean TryGetValue( + String key, + out Object value) + { + if (TryGetProperty(key, out JsonProperty property)) + { + value = property.ValueProvider.GetValue(m_obj); + return true; + } + + value = null; + return false; + } + + private IEnumerable GetProperties() + { + return m_contract.Properties.Where(x => !x.Ignored); + } + + private Boolean TryGetProperty( + String key, + out JsonProperty property) + { + property = m_contract.Properties.GetClosestMatchProperty(key); + if (property != null && !property.Ignored) + { + return true; + } + + property = null; + return false; + } + + private readonly JsonObjectContract m_contract; + private readonly Object m_obj; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/CollectionAccessors/ListOfObjectAccessor.cs b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/ListOfObjectAccessor.cs new file mode 100644 index 00000000000..2bf375d1274 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/ListOfObjectAccessor.cs @@ -0,0 +1,30 @@ +using System; +using System.Collections; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.Expressions.CollectionAccessors +{ + internal sealed class ListOfObjectAccessor : IReadOnlyArray + { + public ListOfObjectAccessor(IList list) + { + m_list = list; + } + + public Int32 Count => m_list.Count; + + public Object this[Int32 index] => m_list[index]; + + public IEnumerator GetEnumerator() + { + return m_list.GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return m_list.GetEnumerator(); + } + + private readonly IList m_list; + } +} 
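
The JsonDictionaryContractAccessor above only learns the dictionary's value type at runtime (from the Json.NET contract), so it closes private static "template" methods over that type with MakeGenericMethod and invokes them through reflection. The stand-alone sketch below restates that pattern outside the SDK; GenericDictionaryProbe and its members are illustrative names, not part of this patch.

    using System;
    using System.Collections.Generic;
    using System.Reflection;

    // Minimal sketch of the reflection pattern used by JsonDictionaryContractAccessor:
    // the generic template is closed over the runtime value type and then invoked.
    static class GenericDictionaryProbe
    {
        public static bool TryGetValue(object dictionary, Type valueType, string key, out object value)
        {
            MethodInfo template = typeof(GenericDictionaryProbe)
                .GetMethod(nameof(TryGetValueCore), BindingFlags.NonPublic | BindingFlags.Static);
            MethodInfo closed = template.MakeGenericMethod(valueType);
            var result = (Tuple<bool, object>)closed.Invoke(null, new object[] { dictionary, key });
            value = result.Item2;
            return result.Item1;
        }

        private static Tuple<bool, object> TryGetValueCore<TValue>(IDictionary<string, TValue> dictionary, string key)
        {
            if (dictionary.TryGetValue(key, out TValue value))
            {
                return new Tuple<bool, object>(true, value);
            }

            return new Tuple<bool, object>(false, null);
        }
    }

For example, calling GenericDictionaryProbe.TryGetValue(new Dictionary<string, int> { ["a"] = 1 }, typeof(int), "a", out var v) returns true with v holding the boxed value 1, even though the caller never names the value type statically.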
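
A related convention appears in ProcessParameters and TaskInputDefinitionBase earlier in this patch: public properties lazily allocate their backing collections, while separate private [DataMember] fields are filled in only inside serialization callbacks so that empty collections never appear on the wire. A reduced sketch of that shape follows, with hypothetical names; the real code routes the copies through SerializationHelper.Copy, which is not reproduced here.

    using System;
    using System.Collections.Generic;
    using System.Runtime.Serialization;

    // Illustrative sketch of the lazily allocated collection + serialization-callback pattern.
    [DataContract]
    public class WidgetCollection
    {
        // Callers can always enumerate or add to Items; the list is created on first use.
        public IList<string> Items => m_items ?? (m_items = new List<string>());

        [OnSerializing]
        private void OnSerializing(StreamingContext context)
        {
            // Copy into the serialized field only when there is something to write.
            m_serializedItems = (m_items != null && m_items.Count > 0) ? new List<string>(m_items) : null;
        }

        [OnSerialized]
        private void OnSerialized(StreamingContext context)
        {
            m_serializedItems = null; // Drop the temporary copy after writing.
        }

        [OnDeserialized]
        private void OnDeserialized(StreamingContext context)
        {
            m_items = m_serializedItems;
            m_serializedItems = null;
        }

        [DataMember(Name = "Items", EmitDefaultValue = false)]
        private List<string> m_serializedItems;

        private List<string> m_items;
    }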
diff --git a/src/Sdk/DTExpressions/Expressions/CollectionAccessors/ReadOnlyDictionaryOfStringObjectAccessor.cs b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/ReadOnlyDictionaryOfStringObjectAccessor.cs new file mode 100644 index 00000000000..c1a1bf0bf4a --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/ReadOnlyDictionaryOfStringObjectAccessor.cs @@ -0,0 +1,37 @@ +using System; +using System.Collections; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.Expressions.CollectionAccessors +{ + internal sealed class ReadOnlyDictionaryOfStringObjectAccessor : IReadOnlyObject + { + public ReadOnlyDictionaryOfStringObjectAccessor(IReadOnlyDictionary dictionary) + { + m_dictionary = dictionary; + } + + public Int32 Count => m_dictionary.Count; + + public IEnumerable Keys => m_dictionary.Keys; + + public IEnumerable Values => m_dictionary.Values; + + public Object this[String key] => m_dictionary[key]; + + public Boolean ContainsKey(String key) => m_dictionary.ContainsKey(key); + + public IEnumerator> GetEnumerator() => m_dictionary.GetEnumerator(); + + IEnumerator IEnumerable.GetEnumerator() => m_dictionary.GetEnumerator(); + + public Boolean TryGetValue( + String key, + out Object value) + { + return m_dictionary.TryGetValue(key, out value); + } + + private readonly IReadOnlyDictionary m_dictionary; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/CollectionAccessors/ReadOnlyDictionaryOfStringStringAccessor.cs b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/ReadOnlyDictionaryOfStringStringAccessor.cs new file mode 100644 index 00000000000..5421608cba0 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/ReadOnlyDictionaryOfStringStringAccessor.cs @@ -0,0 +1,54 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; + +namespace GitHub.DistributedTask.Expressions.CollectionAccessors +{ + internal sealed class ReadOnlyDictionaryOfStringStringAccessor : IReadOnlyObject + { + public ReadOnlyDictionaryOfStringStringAccessor(IReadOnlyDictionary dictionary) + { + m_dictionary = dictionary; + } + + public Int32 Count => m_dictionary.Count; + + public IEnumerable Keys => m_dictionary.Keys; + + public IEnumerable Values => m_dictionary.Values.OfType(); + + public Object this[String key] => m_dictionary[key]; + + public Boolean ContainsKey(String key) + { + return m_dictionary.ContainsKey(key); + } + + public IEnumerator> GetEnumerator() + { + return m_dictionary.Select(x => new KeyValuePair(x.Key, x.Value)).GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return m_dictionary.Select(x => new KeyValuePair(x.Key, x.Value)).GetEnumerator(); + } + + public Boolean TryGetValue( + String key, + out Object value) + { + if (m_dictionary.TryGetValue(key, out String val)) + { + value = val; + return true; + } + + value = default; + return false; + } + + private readonly IReadOnlyDictionary m_dictionary; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/CollectionAccessors/ReadOnlyListOfObjectAccessor.cs b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/ReadOnlyListOfObjectAccessor.cs new file mode 100644 index 00000000000..74b56bd79c8 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/CollectionAccessors/ReadOnlyListOfObjectAccessor.cs @@ -0,0 +1,24 @@ +using System; +using System.Collections; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.Expressions.CollectionAccessors +{ + internal sealed class ReadOnlyListOfObjectAccessor : 
IReadOnlyArray + { + public ReadOnlyListOfObjectAccessor(IReadOnlyList list) + { + m_list = list; + } + + public Int32 Count => m_list.Count; + + public Object this[Int32 index] => m_list[index]; + + public IEnumerator GetEnumerator() => m_list.GetEnumerator(); + + IEnumerator IEnumerable.GetEnumerator() => m_list.GetEnumerator(); + + private readonly IReadOnlyList m_list; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ContainerNode.cs b/src/Sdk/DTExpressions/Expressions/ContainerNode.cs new file mode 100644 index 00000000000..372304d0f22 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ContainerNode.cs @@ -0,0 +1,49 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Reflection; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class ContainerNode : ExpressionNode + { + public IReadOnlyList Parameters => m_parameters.AsReadOnly(); + + public void AddParameter(ExpressionNode node) + { + m_parameters.Add(node); + node.Container = this; + } + + public void ReplaceParameter(Int32 index, ExpressionNode node) + { + m_parameters[index] = node; + node.Container = this; + } + + public override IEnumerable GetParameters() + { + List matched = new List(); + Queue parameters = new Queue(this.Parameters); + + while (parameters.Count > 0) + { + var parameter = parameters.Dequeue(); + if (typeof(T).GetTypeInfo().IsAssignableFrom(parameter.GetType().GetTypeInfo())) + { + matched.Add((T)parameter); + } + + foreach (var childParameter in parameter.GetParameters()) + { + parameters.Enqueue(childParameter); + } + } + + return matched; + } + + private readonly List m_parameters = new List(); + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ContainsNode.cs b/src/Sdk/DTExpressions/Expressions/ContainsNode.cs new file mode 100644 index 00000000000..eca9e2c8528 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ContainsNode.cs @@ -0,0 +1,16 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class ContainsNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + String left = Parameters[0].EvaluateString(context) as String ?? String.Empty; + String right = Parameters[1].EvaluateString(context) as String ?? 
String.Empty; + return left.IndexOf(right, StringComparison.OrdinalIgnoreCase) >= 0; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ContainsValueNode.cs b/src/Sdk/DTExpressions/Expressions/ContainsValueNode.cs new file mode 100644 index 00000000000..d86de2e149a --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ContainsValueNode.cs @@ -0,0 +1,46 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class ContainsValueNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override object EvaluateCore(EvaluationContext context) + { + EvaluationResult left = Parameters[0].Evaluate(context); + + if (left.TryGetCollectionInterface(out Object collection)) + { + EvaluationResult right = Parameters[1].Evaluate(context); + + if (collection is IReadOnlyArray array) + { + foreach (var item in array) + { + var itemResult = EvaluationResult.CreateIntermediateResult(context, item, out _); + + if (right.Equals(context, itemResult)) + { + return true; + } + } + } + else if (collection is IReadOnlyObject obj) + { + foreach (var value in obj.Values) + { + var valueResult = EvaluationResult.CreateIntermediateResult(context, value, out _); + + if (right.Equals(context, valueResult)) + { + return true; + } + } + } + } + + return false; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ConversionResult.cs b/src/Sdk/DTExpressions/Expressions/ConversionResult.cs new file mode 100644 index 00000000000..018f65d0208 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ConversionResult.cs @@ -0,0 +1,19 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public struct ConversionResult + { + /// + /// Result object after the conversion + /// + public Object Result; + + /// + /// Memory overhead for the result object + /// + public ResultMemory ResultMemory; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/EndsWithNode.cs b/src/Sdk/DTExpressions/Expressions/EndsWithNode.cs new file mode 100644 index 00000000000..b09a561af7b --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/EndsWithNode.cs @@ -0,0 +1,16 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class EndsWithNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + String left = Parameters[0].EvaluateString(context) ?? String.Empty; + String right = Parameters[1].EvaluateString(context) ?? 
String.Empty; + return left.EndsWith(right, StringComparison.OrdinalIgnoreCase); + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/EqualNode.cs b/src/Sdk/DTExpressions/Expressions/EqualNode.cs new file mode 100644 index 00000000000..2361c5c3daa --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/EqualNode.cs @@ -0,0 +1,14 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class EqualNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + return Parameters[0].Evaluate(context).Equals(context, Parameters[1].Evaluate(context)); + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/EvaluationContext.cs b/src/Sdk/DTExpressions/Expressions/EvaluationContext.cs new file mode 100644 index 00000000000..1d339c928f1 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/EvaluationContext.cs @@ -0,0 +1,78 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Logging; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class EvaluationContext + { + internal EvaluationContext( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state, + EvaluationOptions options, + ExpressionNode node) + { + ArgumentUtility.CheckForNull(trace, nameof(trace)); + ArgumentUtility.CheckForNull(secretMasker, nameof(secretMasker)); + Trace = trace; + SecretMasker = secretMasker; + State = state; + + // Copy the options + options = new EvaluationOptions(copy: options); + if (options.MaxMemory == 0) + { + // Set a reasonable default max memory + options.MaxMemory = 1048576; // 1 mb + } + Options = options; + Memory = new EvaluationMemory(options.MaxMemory, node); + + m_traceResults = new Dictionary(); + m_traceMemory = new MemoryCounter(null, options.MaxMemory); + } + + public ITraceWriter Trace { get; } + + public ISecretMasker SecretMasker { get; } + + public Object State { get; } + + internal EvaluationMemory Memory { get; } + + internal EvaluationOptions Options { get; } + + internal void SetTraceResult( + ExpressionNode node, + EvaluationResult result) + { + // Remove if previously added. This typically should not happen. This could happen + // due to a badly authored function. So we'll handle it and track memory correctly. + if (m_traceResults.TryGetValue(node, out String oldValue)) + { + m_traceMemory.Remove(oldValue); + m_traceResults.Remove(node); + } + + // Check max memory + String value = ExpressionUtil.FormatValue(SecretMasker, result); + if (m_traceMemory.TryAdd(value)) + { + // Store the result + m_traceResults[node] = value; + } + } + + internal Boolean TryGetTraceResult(ExpressionNode node, out String value) + { + return m_traceResults.TryGetValue(node, out value); + } + + private readonly Dictionary m_traceResults = new Dictionary(); + private readonly MemoryCounter m_traceMemory; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/EvaluationMemory.cs b/src/Sdk/DTExpressions/Expressions/EvaluationMemory.cs new file mode 100644 index 00000000000..e3598519758 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/EvaluationMemory.cs @@ -0,0 +1,111 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.Expressions +{ + /// + /// This is an internal class only. 
+ /// + /// This class is used to track current memory consumption + /// across the entire expression evaluation. + /// + internal sealed class EvaluationMemory + { + internal EvaluationMemory( + Int32 maxBytes, + ExpressionNode node) + { + m_maxAmount = maxBytes; + m_node = node; + } + + internal void AddAmount( + Int32 depth, + Int32 bytes, + Boolean trimDepth = false) + { + // Trim deeper depths + if (trimDepth) + { + while (m_maxActiveDepth > depth) + { + var amount = m_depths[m_maxActiveDepth]; + + if (amount > 0) + { + // Sanity check + if (amount > m_totalAmount) + { + throw new InvalidOperationException("Bytes to subtract exceeds total bytes"); + } + + // Subtract from the total + checked + { + m_totalAmount -= amount; + } + + // Reset the amount + m_depths[m_maxActiveDepth] = 0; + } + + m_maxActiveDepth--; + } + } + + // Grow the depths + if (depth > m_maxActiveDepth) + { + // Grow the list + while (m_depths.Count <= depth) + { + m_depths.Add(0); + } + + // Adjust the max active depth + m_maxActiveDepth = depth; + } + + checked + { + // Add to the depth + m_depths[depth] += bytes; + + // Add to the total + m_totalAmount += bytes; + } + + // Check max + if (m_totalAmount > m_maxAmount) + { + throw new InvalidOperationException(ExpressionResources.ExceededAllowedMemory(m_node?.ConvertToExpression())); + } + } + + internal static Int32 CalculateBytes(Object obj) + { + if (obj is String str) + { + // This measurement doesn't have to be perfect + // https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/ + + checked + { + return c_stringBaseOverhead + ((str?.Length ?? 0) * sizeof(Char)); + } + } + else + { + return c_minObjectSize; + } + } + + private const Int32 c_minObjectSize = 24; + private const Int32 c_stringBaseOverhead = 26; + private readonly List m_depths = new List(); + private readonly Int32 m_maxAmount; + private readonly ExpressionNode m_node; + private Int32 m_maxActiveDepth = -1; + private Int32 m_totalAmount; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/EvaluationOptions.cs b/src/Sdk/DTExpressions/Expressions/EvaluationOptions.cs new file mode 100644 index 00000000000..e42260056a7 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/EvaluationOptions.cs @@ -0,0 +1,43 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class EvaluationOptions + { + public EvaluationOptions() + { + } + + public EvaluationOptions(EvaluationOptions copy) + { + if (copy != null) + { + Converters = copy.Converters; + MaxMemory = copy.MaxMemory; + TimeZone = copy.TimeZone; + UseCollectionInterfaces = copy.UseCollectionInterfaces; + } + } + + /// + /// Converters allow types to be coerced into data that is friendly + /// for expression functions to operate on it. + /// + /// As each node in the expression tree is evaluated, converters are applied. + /// When a node's result matches a converter type, the result is intercepted + /// by the converter, and converter result is used instead. 
+ /// + [EditorBrowsable(EditorBrowsableState.Never)] + public IDictionary> Converters { get; set; } + + public Int32 MaxMemory { get; set; } + + public TimeZoneInfo TimeZone { get; set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + public Boolean UseCollectionInterfaces { get; set; } // Feature flag for now behavior + } +} diff --git a/src/Sdk/DTExpressions/Expressions/EvaluationResult.cs b/src/Sdk/DTExpressions/Expressions/EvaluationResult.cs new file mode 100644 index 00000000000..cb1d1cb3e54 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/EvaluationResult.cs @@ -0,0 +1,828 @@ +using GitHub.DistributedTask.Expressions.CollectionAccessors; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json.Serialization; +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.ComponentModel; +using System.Globalization; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class EvaluationResult + { + internal EvaluationResult( + EvaluationContext context, + Int32 level, + Object val, + ValueKind kind, + Object raw) + : this(context, level, val, kind, raw, false) + { + } + + internal EvaluationResult( + EvaluationContext context, + Int32 level, + Object val, + ValueKind kind, + Object raw, + Boolean omitTracing) + { + m_level = level; + Value = val; + Kind = kind; + Raw = raw; + m_omitTracing = omitTracing; + + if (!omitTracing) + { + TraceValue(context); + } + } + + public ValueKind Kind { get; } + + /// + /// When a custom converter is applied to the node result, raw contains the original value + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public Object Raw { get; } + + public Object Value { get; } + + public int CompareTo( + EvaluationContext context, + EvaluationResult right) + { + Object leftValue; + ValueKind leftKind; + switch (Kind) + { + case ValueKind.Boolean: + case ValueKind.DateTime: + case ValueKind.Number: + case ValueKind.String: + case ValueKind.Version: + leftValue = Value; + leftKind = Kind; + break; + default: + leftValue = ConvertToNumber(context); // Will throw or succeed + leftKind = ValueKind.Number; + break; + } + + if (leftKind == ValueKind.Boolean) + { + Boolean b = right.ConvertToBoolean(context); + return ((Boolean)leftValue).CompareTo(b); + } + else if (leftKind == ValueKind.DateTime) + { + DateTimeOffset d = right.ConvertToDateTime(context); + return ((DateTimeOffset)leftValue).CompareTo(d); + } + else if (leftKind == ValueKind.Number) + { + Decimal d = right.ConvertToNumber(context); + return ((Decimal)leftValue).CompareTo(d); + } + else if (leftKind == ValueKind.String) + { + String s = right.ConvertToString(context); + return String.Compare(leftValue as String ?? String.Empty, s ?? String.Empty, StringComparison.OrdinalIgnoreCase); + } + else //if (leftKind == ValueKind.Version) + { + Version v = right.ConvertToVersion(context); + return (leftValue as Version).CompareTo(v); + } + } + + public Boolean ConvertToBoolean(EvaluationContext context) + { + Boolean result; + switch (Kind) + { + case ValueKind.Boolean: + return (Boolean)Value; // Not converted. Don't trace. + + case ValueKind.Number: + result = (Decimal)Value != 0m; // 0 converts to false, otherwise true. 
+ TraceValue(context, result, ValueKind.Boolean); + return result; + + case ValueKind.String: + result = !String.IsNullOrEmpty(Value as String); + TraceValue(context, result, ValueKind.Boolean); + return result; + + case ValueKind.Array: + case ValueKind.DateTime: + case ValueKind.Object: + case ValueKind.Version: + result = true; + TraceValue(context, result, ValueKind.Boolean); + return result; + + case ValueKind.Null: + result = false; + TraceValue(context, result, ValueKind.Boolean); + return result; + + default: // Should never reach here. + throw new NotSupportedException($"Unable to convert value to Boolean. Unexpected value kind '{Kind}'."); + } + } + + public DateTimeOffset ConvertToDateTime(EvaluationContext context) + { + DateTimeOffset result; + if (TryConvertToDateTime(context, out result)) + { + return result; + } + + throw new TypeCastException(context?.SecretMasker, Value, fromKind: Kind, toKind: ValueKind.DateTime); + } + + public Object ConvertToNull(EvaluationContext context) + { + Object result; + if (TryConvertToNull(context, out result)) + { + return result; + } + + throw new TypeCastException(context?.SecretMasker, Value, fromKind: Kind, toKind: ValueKind.Null); + } + + public Decimal ConvertToNumber(EvaluationContext context) + { + Decimal result; + if (TryConvertToNumber(context, out result)) + { + return result; + } + + throw new TypeCastException(context?.SecretMasker, Value, fromKind: Kind, toKind: ValueKind.Number); + } + + public String ConvertToString(EvaluationContext context) + { + String result; + if (TryConvertToString(context, out result)) + { + return result; + } + + throw new TypeCastException(context?.SecretMasker, Value, fromKind: Kind, toKind: ValueKind.String); + } + + public Version ConvertToVersion(EvaluationContext context) + { + Version result; + if (TryConvertToVersion(context, out result)) + { + return result; + } + + throw new TypeCastException(context?.SecretMasker, Value, fromKind: Kind, toKind: ValueKind.Version); + } + + public Boolean Equals( + EvaluationContext context, + EvaluationResult right) + { + if (Kind == ValueKind.Boolean) + { + Boolean b = right.ConvertToBoolean(context); + return (Boolean)Value == b; + } + else if (Kind == ValueKind.DateTime) + { + DateTimeOffset d; + if (right.TryConvertToDateTime(context, out d)) + { + return (DateTimeOffset)Value == d; + } + } + else if (Kind == ValueKind.Number) + { + Decimal d; + if (right.TryConvertToNumber(context, out d)) + { + return (Decimal)Value == d; + } + } + else if (Kind == ValueKind.Version) + { + Version v; + if (right.TryConvertToVersion(context, out v)) + { + return (Version)Value == v; + } + } + else if (Kind == ValueKind.String) + { + String s; + if (right.TryConvertToString(context, out s)) + { + return String.Equals( + Value as String ?? String.Empty, + s ?? String.Empty, + StringComparison.OrdinalIgnoreCase); + } + } + else if (Kind == ValueKind.Array || Kind == ValueKind.Object) + { + return Kind == right.Kind && Object.ReferenceEquals(Value, right.Value); + } + else if (Kind == ValueKind.Null) + { + Object n; + if (right.TryConvertToNull(context, out n)) + { + return true; + } + } + + return false; + } + + public Boolean TryConvertToDateTime( + EvaluationContext context, + out DateTimeOffset result) + { + switch (Kind) + { + case ValueKind.DateTime: + result = (DateTimeOffset)Value; // Not converted. Don't trace again. 
+ return true; + + case ValueKind.String: + if (TryParseDateTime(context?.Options, Value as String, out result)) + { + TraceValue(context, result, ValueKind.DateTime); + return true; + } + + TraceCoercionFailed(context, toKind: ValueKind.DateTime); + return false; + + case ValueKind.Array: + case ValueKind.Boolean: + case ValueKind.Null: + case ValueKind.Number: + case ValueKind.Object: + case ValueKind.Version: + result = default; + TraceCoercionFailed(context, toKind: ValueKind.DateTime); + return false; + + default: // Should never reach here. + throw new NotSupportedException($"Unable to determine whether value can be converted to Number. Unexpected value kind '{Kind}'."); + } + } + + public Boolean TryConvertToNull( + EvaluationContext context, + out Object result) + { + switch (Kind) + { + case ValueKind.Null: + result = null; // Not converted. Don't trace again. + return true; + + case ValueKind.String: + if (String.IsNullOrEmpty(Value as String)) + { + result = null; + TraceValue(context, result, ValueKind.Null); + return true; + } + + break; + } + + result = null; + TraceCoercionFailed(context, toKind: ValueKind.Null); + return false; + } + + public Boolean TryConvertToNumber( + EvaluationContext context, + out Decimal result) + { + switch (Kind) + { + case ValueKind.Boolean: + result = (Boolean)Value ? 1m : 0m; + TraceValue(context, result, ValueKind.Number); + return true; + + case ValueKind.Number: + result = (Decimal)Value; // Not converted. Don't trace again. + return true; + + case ValueKind.String: + String s = Value as String ?? String.Empty; + if (String.IsNullOrEmpty(s)) + { + result = 0m; + TraceValue(context, result, ValueKind.Number); + return true; + } + + if (Decimal.TryParse(s, s_numberStyles, CultureInfo.InvariantCulture, out result)) + { + TraceValue(context, result, ValueKind.Number); + return true; + } + + TraceCoercionFailed(context, toKind: ValueKind.Number); + return false; + + case ValueKind.Array: + case ValueKind.DateTime: + case ValueKind.Object: + case ValueKind.Version: + result = default(Decimal); + TraceCoercionFailed(context, toKind: ValueKind.Number); + return false; + + case ValueKind.Null: + result = 0m; + TraceValue(context, result, ValueKind.Number); + return true; + + default: // Should never reach here. + throw new NotSupportedException($"Unable to determine whether value can be converted to Number. Unexpected value kind '{Kind}'."); + } + } + + public Boolean TryConvertToString( + EvaluationContext context, + out String result) + { + switch (Kind) + { + case ValueKind.Boolean: + result = String.Format(CultureInfo.InvariantCulture, "{0}", Value); + TraceValue(context, result, ValueKind.String); + return true; + + case ValueKind.DateTime: + result = ((DateTimeOffset)Value).ToString(ExpressionConstants.DateTimeFormat, CultureInfo.InvariantCulture); + TraceValue(context, result, ValueKind.String); + return true; + + case ValueKind.Number: + result = ((Decimal)Value).ToString(ExpressionConstants.NumberFormat, CultureInfo.InvariantCulture); + TraceValue(context, result, ValueKind.String); + return true; + + case ValueKind.String: + result = Value as String; // Not converted. Don't trace. 
+ return true; + + case ValueKind.Version: + result = (Value as Version).ToString(); + TraceValue(context, result, ValueKind.String); + return true; + + case ValueKind.Null: + result = String.Empty; + TraceValue(context, result, ValueKind.Null); + return true; + + case ValueKind.Array: + case ValueKind.Object: + result = null; + TraceCoercionFailed(context, toKind: ValueKind.String); + return false; + + default: // Should never reach here. + throw new NotSupportedException($"Unable to convert to String. Unexpected value kind '{Kind}'."); + } + } + + public Boolean TryConvertToVersion( + EvaluationContext context, + out Version result) + { + switch (Kind) + { + case ValueKind.Boolean: + result = null; + TraceCoercionFailed(context, toKind: ValueKind.Version); + return false; + + case ValueKind.Number: + if (Version.TryParse(ConvertToString(context), out result)) + { + TraceValue(context, result, ValueKind.Version); + return true; + } + + TraceCoercionFailed(context, toKind: ValueKind.Version); + return false; + + case ValueKind.String: + String s = Value as String ?? String.Empty; + if (Version.TryParse(s, out result)) + { + TraceValue(context, result, ValueKind.Version); + return true; + } + + TraceCoercionFailed(context, toKind: ValueKind.Version); + return false; + + case ValueKind.Version: + result = Value as Version; // Not converted. Don't trace again. + return true; + + case ValueKind.Array: + case ValueKind.DateTime: + case ValueKind.Object: + case ValueKind.Null: + result = null; + TraceCoercionFailed(context, toKind: ValueKind.Version); + return false; + + default: // Should never reach here. + throw new NotSupportedException($"Unable to convert to Version. Unexpected value kind '{Kind}'."); + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public Boolean TryGetCollectionInterface(out Object collection) + { + if ((Kind == ValueKind.Object || Kind == ValueKind.Array)) + { + var obj = Value; + if (obj is IReadOnlyObject) + { + collection = obj; + return true; + } + else if (obj is IDictionary dictionary1) + { + collection = new ReadOnlyDictionaryOfStringStringAccessor(new ReadOnlyDictionary(dictionary1)); + return true; + } + else if (obj is IDictionary dictionary2) + { + collection = new ReadOnlyDictionaryOfStringObjectAccessor(new ReadOnlyDictionary(dictionary2)); + return true; + } + else if (obj is IReadOnlyDictionary dictionary3) + { + collection = new ReadOnlyDictionaryOfStringStringAccessor(dictionary3); + return true; + } + else if (obj is IReadOnlyDictionary dictionary4) + { + collection = new ReadOnlyDictionaryOfStringObjectAccessor(dictionary4); + return true; + } + else if (obj is JObject jobject) + { + collection = new JObjectAccessor(jobject); + return true; + } + else if (obj is IReadOnlyArray) + { + collection = obj; + return true; + } + else if (obj is IList list1) + { + collection = new ListOfObjectAccessor(list1); + return true; + } + else if (obj is IReadOnlyList list2) + { + collection = new ReadOnlyListOfObjectAccessor(list2); + return true; + } + else if (obj is JArray jarray) + { + collection = new JArrayAccessor(jarray); + return true; + } + + var contract = s_serializer.Value.ContractResolver.ResolveContract(obj.GetType()); + if (contract is JsonObjectContract objectContract) + { + collection = new JsonObjectContractAccessor(objectContract, obj); + return true; + } + else if (contract is JsonDictionaryContract dictionaryContract && dictionaryContract.DictionaryKeyType == typeof(String)) + { + collection = new 
JsonDictionaryContractAccessor(dictionaryContract, obj); + return true; + } + } + + collection = null; + return false; + } + + /// + /// Useful for working with values that are not the direct evaluation result of a parameter. + /// This allows ExpressionNode authors to leverage the coercion and comparision functions + /// for any values. + /// + /// Also note, the value will be canonicalized (for example numeric types converted to decimal) and any + /// matching converters applied. + /// + public static EvaluationResult CreateIntermediateResult( + EvaluationContext context, + Object obj, + out ResultMemory conversionResultMemory) + { + var val = ExpressionUtil.ConvertToCanonicalValue(context?.Options, obj, out ValueKind kind, out Object raw, out conversionResultMemory); + return new EvaluationResult(context, 0, val, kind, raw, omitTracing: true); + } + + private void TraceCoercionFailed( + EvaluationContext context, + ValueKind toKind) + { + if (!m_omitTracing) + { + TraceVerbose(context, String.Format(CultureInfo.InvariantCulture, "=> Unable to coerce {0} to {1}.", Kind, toKind)); + } + } + + private void TraceValue(EvaluationContext context) + { + if (!m_omitTracing) + { + TraceValue(context, Value, Kind); + } + } + + private void TraceValue( + EvaluationContext context, + Object val, + ValueKind kind) + { + if (!m_omitTracing) + { + TraceVerbose(context, String.Concat("=> ", ExpressionUtil.FormatValue(context?.SecretMasker, val, kind))); + } + } + + private void TraceVerbose( + EvaluationContext context, + String message) + { + if (!m_omitTracing) + { + context?.Trace.Verbose(String.Empty.PadLeft(m_level * 2, '.') + (message ?? String.Empty)); + } + } + + private static Boolean TryParseDateTime( + EvaluationOptions options, + String s, + out DateTimeOffset result) + { + if (String.IsNullOrEmpty(s)) + { + result = default; + return false; + } + + s = s.Trim(); + var i = 0; + + // Year, month, day, hour, min, sec + if (!ReadInt32(s, 4, 4, ref i, out Int32 year) || + !ReadSeparator(s, ref i, new[] { '-', '/' }, out Char dateSeparator) || + !ReadInt32(s, 1, 2, ref i, out Int32 month) || + !ReadSeparator(s, ref i, dateSeparator) || + !ReadInt32(s, 1, 2, ref i, out Int32 day) || + !ReadSeparator(s, ref i, ' ', 'T') || + !ReadInt32(s, 1, 2, ref i, out Int32 hour) || + !ReadSeparator(s, ref i, ':') || + !ReadInt32(s, 1, 2, ref i, out Int32 minute) || + !ReadSeparator(s, ref i, ':') || + !ReadInt32(s, 1, 2, ref i, out Int32 second)) + { + result = default; + return false; + } + + // Fraction of second + Int32 ticks; + if (ExpressionUtil.SafeCharAt(s, i) == '.') + { + i++; + if (!ReadDigits(s, 1, 7, ref i, out String digits)) + { + result = default; + return false; + } + + if (digits.Length < 7) + { + digits = digits.PadRight(7, '0'); + } + + ticks = Int32.Parse(digits, NumberStyles.None, CultureInfo.InvariantCulture); + } + else + { + ticks = 0; + } + + TimeSpan offset; + + // End of string indicates local time zone + if (i >= s.Length) + { + // Determine the offset + var timeZone = options?.TimeZone ?? 
TimeZoneInfo.Local; + try + { + var dateTime = new DateTime(year, month, day, hour, minute, second, DateTimeKind.Unspecified); + offset = timeZone.GetUtcOffset(dateTime); + } + catch + { + result = default; + return false; + } + } + // Offset, then end of string + else if (!ReadOffset(s, ref i, out offset) || + i < s.Length) + { + result = default; + return false; + } + + // Construct the DateTimeOffset + try + { + result = new DateTimeOffset(year, month, day, hour, minute, second, offset); + } + catch + { + result = default; + return false; + } + + // Add fraction of second + if (ticks > 0) + { + result = result.AddTicks(ticks); + } + + return true; + } + + private static Boolean ReadDigits( + String str, + Int32 minLength, + Int32 maxLength, + ref Int32 index, + out String result) + { + var startIndex = index; + while (Char.IsDigit(ExpressionUtil.SafeCharAt(str, index))) + { + index++; + } + + var length = index - startIndex; + if (length < minLength || length > maxLength) + { + result = default; + return false; + } + + result = str.Substring(startIndex, length); + return true; + } + + private static Boolean ReadInt32( + String str, + Int32 minLength, + Int32 maxLength, + ref Int32 index, + out Int32 result) + { + if (!ReadDigits(str, minLength, maxLength, ref index, out String digits)) + { + result = default; + return false; + } + + result = Int32.Parse(digits, NumberStyles.None, CultureInfo.InvariantCulture); + return true; + } + + private static Boolean ReadSeparator( + String str, + ref Int32 index, + params Char[] allowed) + { + return ReadSeparator(str, ref index, allowed, out _); + } + + private static Boolean ReadSeparator( + String str, + ref Int32 index, + Char[] allowed, + out Char separator) + { + separator = ExpressionUtil.SafeCharAt(str, index++); + foreach (var a in allowed) + { + if (separator == a) + { + return true; + } + } + + separator = default; + return false; + } + + private static Boolean ReadOffset( + String str, + ref Int32 index, + out TimeSpan offset) + { + // Z indicates UTC + if (ExpressionUtil.SafeCharAt(str, index) == 'Z') + { + index++; + offset = TimeSpan.Zero; + return true; + } + + Boolean subtract; + + // Negative + if (ExpressionUtil.SafeCharAt(str, index) == '-') + { + index++; + subtract = true; + } + // Positive + else if (ExpressionUtil.SafeCharAt(str, index) == '+') + { + index++; + subtract = false; + } + // Invalid + else + { + offset = default; + return false; + } + + // Hour and minute + if (!ReadInt32(str, 1, 2, ref index, out Int32 hour) || + !ReadSeparator(str, ref index, ':') || + !ReadInt32(str, 1, 2, ref index, out Int32 minute)) + { + offset = default; + return false; + } + + // Construct the offset + if (subtract) + { + offset = TimeSpan.Zero.Subtract(new TimeSpan(hour, minute, 0)); + } + else + { + offset = new TimeSpan(hour, minute, 0); + } + + return true; + } + + private static readonly NumberStyles s_numberStyles = + NumberStyles.AllowDecimalPoint | + NumberStyles.AllowLeadingSign | + NumberStyles.AllowLeadingWhite | + NumberStyles.AllowThousands | + NumberStyles.AllowTrailingWhite; + private static readonly Lazy s_serializer = new Lazy(() => JsonUtility.CreateJsonSerializer()); + private readonly Int32 m_level; + private readonly Boolean m_omitTracing; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/EvaluationTraceWriter.cs b/src/Sdk/DTExpressions/Expressions/EvaluationTraceWriter.cs new file mode 100644 index 00000000000..0713171651e --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/EvaluationTraceWriter.cs @@ -0,0 
+1,37 @@ +using System; +using GitHub.DistributedTask.Logging; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class EvaluationTraceWriter : ITraceWriter + { + public EvaluationTraceWriter(ITraceWriter trace, ISecretMasker secretMasker) + { + ArgumentUtility.CheckForNull(secretMasker, nameof(secretMasker)); + m_trace = trace; + m_secretMasker = secretMasker; + } + + public void Info(String message) + { + if (m_trace != null) + { + message = m_secretMasker.MaskSecrets(message); + m_trace.Info(message); + } + } + + public void Verbose(String message) + { + if (m_trace != null) + { + message = m_secretMasker.MaskSecrets(message); + m_trace.Verbose(message); + } + } + + private readonly ISecretMasker m_secretMasker; + private readonly ITraceWriter m_trace; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ExpressionConstants.cs b/src/Sdk/DTExpressions/Expressions/ExpressionConstants.cs new file mode 100644 index 00000000000..54f7b7d3eb5 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ExpressionConstants.cs @@ -0,0 +1,52 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.Expressions +{ + internal static class ExpressionConstants + { + static ExpressionConstants() + { + AddFunction("and", 2, Int32.MaxValue); + AddFunction("coalesce", 2, Int32.MaxValue); + AddFunction("contains", 2, 2); + AddFunction("containsValue", 2, 2); + AddFunction("endsWith", 2, 2); + AddFunction("eq", 2, 2); + AddFunction("format", 1, Byte.MaxValue); + AddFunction("gt", 2, 2); + AddFunction("ge", 2, 2); + AddFunction("lt", 2, 2); + AddFunction("join", 2, 2); + AddFunction("le", 2, 2); + AddFunction("in", 2, Int32.MaxValue); + AddFunction("not", 1, 1); + AddFunction("ne", 2, 2); + AddFunction("notIn", 2, Int32.MaxValue); + AddFunction("or", 2, Int32.MaxValue); + AddFunction("startsWith", 2, 2); + AddFunction("xor", 2, 2); + } + + private static void AddFunction(String name, Int32 minParameters, Int32 maxParameters) + where T : FunctionNode, new() + { + WellKnownFunctions.Add(name, new FunctionInfo(name, minParameters, maxParameters)); + } + + internal static readonly String DateTimeFormat = @"yyyy\-MM\-dd\ HH\:mm\:sszzz"; + internal static readonly Int32 MaxDepth = 50; + internal static readonly Int32 MaxLength = 21000; // Under 85,000 large object heap threshold, even if .NET switches to UTF-32 + internal static readonly String NumberFormat = "0.#######"; + internal static readonly Dictionary WellKnownFunctions = new Dictionary(StringComparer.OrdinalIgnoreCase); + + // Punctuation + internal const Char StartIndex = '['; + internal const Char StartParameter = '('; + internal const Char EndIndex = ']'; + internal const Char EndParameter = ')'; + internal const Char Separator = ','; + internal const Char Dereference = '.'; + internal const Char Wildcard = '*'; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ExpressionException.cs b/src/Sdk/DTExpressions/Expressions/ExpressionException.cs new file mode 100644 index 00000000000..ef2623cd6bb --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ExpressionException.cs @@ -0,0 +1,22 @@ +using System; +using GitHub.DistributedTask.Logging; + +namespace GitHub.DistributedTask.Expressions +{ + public class ExpressionException : Exception + { + internal ExpressionException(ISecretMasker secretMasker, String message) + { + if (secretMasker != null) + { + message = secretMasker.MaskSecrets(message); + } + + m_message = message; + } + + public override String Message => m_message; + 
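// --- Illustrative aside (not part of the patch) ---------------------------------------
// ExpressionConstants.DateTimeFormat above is the round-trip shape used throughout this
// SDK: TryParseDateTime (earlier in this patch) accepts "yyyy-MM-dd HH:mm:ss" with an
// optional fractional second and an optional 'Z' or +/-HH:mm offset, and DateTime results
// are rendered back with the same pattern. A quick sketch with a made-up timestamp:
using System;
using System.Globalization;

static class DateTimeFormatSketch
{
    public static void Main()
    {
        var value = new DateTimeOffset(2019, 10, 10, 0, 52, 42, TimeSpan.FromHours(-4));

        // Prints: 2019-10-10 00:52:42-04:00
        Console.WriteLine(value.ToString(@"yyyy\-MM\-dd\ HH\:mm\:sszzz", CultureInfo.InvariantCulture));
    }
}
// ---------------------------------------------------------------------------------------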
+ private readonly String m_message; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ExpressionNode.cs b/src/Sdk/DTExpressions/Expressions/ExpressionNode.cs new file mode 100644 index 00000000000..e08b8d90e0b --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ExpressionNode.cs @@ -0,0 +1,494 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Reflection; +using GitHub.DistributedTask.Logging; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class ExpressionNode : IExpressionNode + { + internal ContainerNode Container { get; set; } + + internal Int32 Level { get; private set; } + + /// + /// The name is used for tracing. Normally the parser will set the name. However if a node + /// is added manually, then the name may not be set and will fallback to the type name. + /// + protected internal String Name + { + get + { + return !String.IsNullOrEmpty(m_name) ? m_name : this.GetType().Name; + } + + set + { + m_name = value; + } + } + + /// + /// Indicates whether the evalation result should be stored on the context and used + /// when the realized result is traced. + /// + protected abstract Boolean TraceFullyRealized { get; } + + internal abstract String ConvertToExpression(); + + internal abstract String ConvertToRealizedExpression(EvaluationContext context); + + /// + /// Evaluates the node + /// + protected virtual Object EvaluateCore(EvaluationContext context) + { + throw new InvalidOperationException($"Method {nameof(EvaluateCore)} not implemented"); + } + + /// + /// Evaluates the node + /// + /// The current expression context + /// + /// Helps determine how much memory is being consumed across the evaluation of the expression. + /// + protected virtual Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + return EvaluateCore(context); + } + + /// + /// INode entry point. + /// + public T Evaluate( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state, + EvaluationOptions options = null) + { + if (Container != null) + { + // Do not localize. This is an SDK consumer error. + throw new NotSupportedException($"Expected {nameof(IExpressionNode)}.{nameof(Evaluate)} to be called on root node only."); + } + + ISecretMasker originalSecretMasker = secretMasker; + try + { + secretMasker = secretMasker?.Clone() ?? 
new SecretMasker(); + trace = new EvaluationTraceWriter(trace, secretMasker); + var context = new EvaluationContext(trace, secretMasker, state, options, this); + trace.Info($"Evaluating: {ConvertToExpression()}"); + + // String + if (typeof(T).Equals(typeof(String))) + { + String stringResult = EvaluateString(context); + TraceTreeResult(context, stringResult, ValueKind.String); + return (T)(Object)stringResult; + } + // Boolean + else if (typeof(T).Equals(typeof(Boolean))) + { + Boolean booleanResult = EvaluateBoolean(context); + TraceTreeResult(context, booleanResult, ValueKind.Boolean); + return (T)(Object)booleanResult; + } + // Version + else if (typeof(T).Equals(typeof(Version))) + { + Version versionResult = EvaluateVersion(context); + TraceTreeResult(context, versionResult, ValueKind.Version); + return (T)(Object)versionResult; + } + // DateTime types + else if (typeof(T).Equals(typeof(DateTimeOffset))) + { + DateTimeOffset dateTimeResult = EvaluateDateTime(context); + TraceTreeResult(context, dateTimeResult, ValueKind.DateTime); + return (T)(Object)dateTimeResult; + } + else if (typeof(T).Equals(typeof(DateTime))) + { + DateTimeOffset dateTimeResult = EvaluateDateTime(context); + TraceTreeResult(context, dateTimeResult, ValueKind.DateTime); + return (T)(Object)dateTimeResult.UtcDateTime; + } + + TypeInfo typeInfo = typeof(T).GetTypeInfo(); + if (typeInfo.IsPrimitive) + { + // Decimal + if (typeof(T).Equals(typeof(Decimal))) + { + Decimal decimalResult = EvaluateNumber(context); + TraceTreeResult(context, decimalResult, ValueKind.Number); + return (T)(Object)decimalResult; + } + // Other numeric types + else if (typeof(T).Equals(typeof(Byte)) || + typeof(T).Equals(typeof(SByte)) || + typeof(T).Equals(typeof(Int16)) || + typeof(T).Equals(typeof(UInt16)) || + typeof(T).Equals(typeof(Int32)) || + typeof(T).Equals(typeof(UInt32)) || + typeof(T).Equals(typeof(Int64)) || + typeof(T).Equals(typeof(UInt64)) || + typeof(T).Equals(typeof(Single)) || + typeof(T).Equals(typeof(Double))) + { + Decimal decimalResult = EvaluateNumber(context); + trace.Verbose($"Converting expression result to type {typeof(T).Name}"); + try + { + T numericResult = (T)Convert.ChangeType(decimalResult, typeof(T)); + + // Note, the value is converted back to decimal before tracing, in order to leverage the same + // util-formatting method used in other places. + TraceTreeResult(context, Convert.ToDecimal((Object)numericResult), ValueKind.Number); + + return numericResult; + } + catch (Exception exception) + { + context.Trace.Verbose($"Failed to convert the result number into the type {typeof(T).Name}. 
{exception.Message}"); + throw new TypeCastException( + secretMasker, + value: decimalResult, + fromKind: ValueKind.Number, + toType: typeof(T), + error: exception.Message); + } + } + } + + // Generic evaluate + EvaluationResult result = Evaluate(context); + TraceTreeResult(context, result.Value, result.Kind); + + // JToken + if (typeof(T).Equals(typeof(JToken))) + { + if (result.Value is null) + { + return default; + } + else if (result.Value is JToken) + { + return (T)result.Value; + } + else + { + return (T)(Object)JToken.FromObject(result.Value, JsonUtility.CreateJsonSerializer()); + } + } + // Object or Array + else if (result.Kind == ValueKind.Object || result.Kind == ValueKind.Array) + { + Type resultType = result.Value.GetType(); + context.Trace.Verbose($"Result type: {resultType.Name}"); + if (typeInfo.IsAssignableFrom(resultType.GetTypeInfo())) + { + return (T)result.Value; + } + else + { + context.Trace.Verbose($"Unable to assign result to the type {typeof(T).Name}"); + throw new TypeCastException(fromType: resultType, toType: typeof(T)); + } + } + // Null + else if (result.Kind == ValueKind.Null) + { + return default; + } + // String + else if (result.Kind == ValueKind.String) + { + // Treat empty string as null + String stringResult = result.Value as String; + if (String.IsNullOrEmpty(stringResult)) + { + return default; + } + + // Otherwise deserialize + try + { + return JsonUtility.FromString(stringResult); + } + catch (Exception exception) when (exception is JsonReaderException || exception is JsonSerializationException) + { + context.Trace.Verbose($"Failed to json-deserialize the result string into the type {typeof(T).Name}. {exception.Message}"); + throw new TypeCastException( + context.SecretMasker, + value: stringResult, + fromKind: ValueKind.String, + toType: typeof(T), + error: exception.Message); + } + } + else + { + context.Trace.Verbose($"Unable to convert from kind {result.Kind} to the type {typeof(T).Name}"); + throw new TypeCastException( + context.SecretMasker, + value: result.Value, + fromKind: result.Kind, + toType: typeof(T)); + } + } + finally + { + if (secretMasker != null && secretMasker != originalSecretMasker) + { + (secretMasker as IDisposable)?.Dispose(); + secretMasker = null; + } + } + } + + /// + /// INode entry point. + /// + public Object Evaluate( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state, + EvaluationOptions options = null) + { + return Evaluate(trace, secretMasker, state, options, out _, out _); + } + + /// + /// INode entry point. + /// + public Boolean EvaluateBoolean( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state) + { + return Evaluate(trace, secretMasker, state); + } + + /// + /// INode entry point. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public EvaluationResult EvaluateResult( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state, + EvaluationOptions options) + { + var val = Evaluate(trace, secretMasker, state, options, out ValueKind kind, out Object raw); + return new EvaluationResult(null, 0, val, kind, raw, omitTracing: true); + } + + /// + /// This function is intended only for ExpressionNode authors to call. The EvaluationContext + /// caches result-state specific to the evaluation instance. + /// + public EvaluationResult Evaluate(EvaluationContext context) + { + // Evaluate + Level = Container == null ? 
0 : Container.Level + 1; + TraceVerbose(context, Level, $"Evaluating {Name}:"); + var coreResult = EvaluateCore(context, out ResultMemory coreMemory); + + if (coreMemory == null) + { + coreMemory = new ResultMemory(); + } + + // Convert to canonical value + var val = ExpressionUtil.ConvertToCanonicalValue(context.Options, coreResult, out ValueKind kind, out Object raw, out ResultMemory conversionMemory); + + // The depth can be safely trimmed when the total size of the core result is known, + // or when the total size of the core result can easily be determined. + var trimDepth = coreMemory.IsTotal || (Object.ReferenceEquals(raw, null) && s_simpleKinds.Contains(kind)); + + // Account for the memory overhead of the core result + var coreBytes = coreMemory.Bytes ?? EvaluationMemory.CalculateBytes(raw ?? val); + context.Memory.AddAmount(Level, coreBytes, trimDepth); + + // Account for the memory overhead of the conversion result + if (!Object.ReferenceEquals(raw, null)) + { + if (conversionMemory == null) + { + conversionMemory = new ResultMemory(); + } + + var conversionBytes = conversionMemory.Bytes ?? EvaluationMemory.CalculateBytes(val); + context.Memory.AddAmount(Level, conversionBytes); + } + + var result = new EvaluationResult(context, Level, val, kind, raw); + + // Store the trace result + if (this.TraceFullyRealized) + { + context.SetTraceResult(this, result); + } + + return result; + } + + /// + /// This function is intended only for ExpressionNode authors to call during evaluation. + /// The EvaluationContext caches result-state specific to the evaluation instance. + /// + public Boolean EvaluateBoolean(EvaluationContext context) + { + return Evaluate(context).ConvertToBoolean(context); + } + + /// + /// This function is intended only for ExpressionNode authors to call during evaluation. + /// The EvaluationContext caches result-state specific to the evaluation instance. + /// + public DateTimeOffset EvaluateDateTime(EvaluationContext context) + { + return Evaluate(context).ConvertToDateTime(context); + } + + /// + /// This function is intended only for ExpressionNode authors to call during evaluation. + /// The EvaluationContext caches result-state specific to the evaluation instance. + /// + public Decimal EvaluateNumber(EvaluationContext context) + { + return Evaluate(context).ConvertToNumber(context); + } + + /// + /// This function is intended only for ExpressionNode authors to call during evaluation. + /// The EvaluationContext caches result-state specific to the evaluation instance. + /// + public String EvaluateString(EvaluationContext context) + { + return Evaluate(context).ConvertToString(context); + } + + /// + /// This function is intended only for ExpressionNode authors to call during evaluation. + /// The EvaluationContext caches result-state specific to the evaluation instance. + /// + public Version EvaluateVersion(EvaluationContext context) + { + return Evaluate(context).ConvertToVersion(context); + } + + public virtual IEnumerable GetParameters() where T : IExpressionNode + { + return new T[0]; + } + + protected MemoryCounter CreateMemoryCounter(EvaluationContext context) + { + return new MemoryCounter(this, context.Options.MaxMemory); + } + + private Object Evaluate( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state, + EvaluationOptions options, + out ValueKind kind, + out Object raw) + { + if (Container != null) + { + // Do not localize. This is an SDK consumer error. 
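// --- Illustrative aside (not part of the patch) ---------------------------------------
// The typed helpers above (EvaluateBoolean/EvaluateNumber/EvaluateString/...) are the
// surface ExpressionNode authors build on. A minimal sketch of a custom function node;
// the name "IsMultipleNode" and its behavior are hypothetical, not part of this patch:
using System;

namespace GitHub.DistributedTask.Expressions
{
    public sealed class IsMultipleNode : FunctionNode
    {
        // Keep the un-expanded expression in trace output, like gt/ge/lt/le do.
        protected override Boolean TraceFullyRealized => false;

        protected override Object EvaluateCore(EvaluationContext context)
        {
            // Both parameters are coerced to Number using the conversion rules shown earlier.
            Decimal value = Parameters[0].EvaluateNumber(context);
            Decimal divisor = Parameters[1].EvaluateNumber(context);
            return divisor != 0m && value % divisor == 0m;
        }
    }
}
// A registration sketch for wiring this node into the parser appears further down,
// alongside HandleFunction.
// ---------------------------------------------------------------------------------------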
+ throw new NotSupportedException($"Expected {nameof(IExpressionNode)}.{nameof(Evaluate)} to be called on root node only."); + } + + ISecretMasker originalSecretMasker = secretMasker; + try + { + // Evaluate + secretMasker = secretMasker?.Clone() ?? new SecretMasker(); + trace = new EvaluationTraceWriter(trace, secretMasker); + var context = new EvaluationContext(trace, secretMasker, state, options, this); + trace.Info($"Evaluating: {ConvertToExpression()}"); + EvaluationResult result = Evaluate(context); + + // Trace the result + TraceTreeResult(context, result.Value, result.Kind); + + kind = result.Kind; + raw = result.Raw; + return result.Value; + } + finally + { + if (secretMasker != null && secretMasker != originalSecretMasker) + { + (secretMasker as IDisposable)?.Dispose(); + secretMasker = null; + } + } + } + + private void TraceTreeResult( + EvaluationContext context, + Object result, + ValueKind kind) + { + // Get the realized expression + String realizedExpression = ConvertToRealizedExpression(context); + + // Format the result + String traceValue = ExpressionUtil.FormatValue(context.SecretMasker, result, kind); + + // Only trace the realized expression if it is meaningfully different + if (!String.Equals(realizedExpression, traceValue, StringComparison.Ordinal)) + { + if (kind == ValueKind.Number && + String.Equals(realizedExpression, $"'{traceValue}'", StringComparison.Ordinal)) + { + // Don't bother tracing the realized expression when the result is a number and the + // realized expresion is a precisely matching string. + } + else + { + context.Trace.Info($"Expanded: {realizedExpression}"); + } + } + + // Always trace the result + context.Trace.Info($"Result: {traceValue}"); + } + + private static void TraceVerbose( + EvaluationContext context, + Int32 level, + String message) + { + context.Trace.Verbose(String.Empty.PadLeft(level * 2, '.') + (message ?? String.Empty)); + } + + private static readonly ValueKind[] s_simpleKinds = new[] + { + ValueKind.Boolean, + ValueKind.DateTime, + ValueKind.Null, + ValueKind.Number, + ValueKind.String, + ValueKind.Version, + }; + + private String m_name; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ExpressionParser.cs b/src/Sdk/DTExpressions/Expressions/ExpressionParser.cs new file mode 100644 index 00000000000..9e562f38863 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ExpressionParser.cs @@ -0,0 +1,547 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ExpressionParser + { + public ExpressionParser(): this(null) + { + } + + public ExpressionParser(ExpressionParserOptions options) + { + m_parserOptions = options ?? 
new ExpressionParserOptions(); + } + + public IExpressionNode CreateTree( + String expression, + ITraceWriter trace, + IEnumerable namedValues, + IEnumerable functions) + { + var context = new ParseContext(expression, trace, namedValues, functions, allowUnknownKeywords: false, allowKeywordHyphens: m_parserOptions.AllowHyphens); + context.Trace.Info($"Parsing expression: <{expression}>"); + return CreateTree(context); + } + + public void ValidateSyntax( + String expression, + ITraceWriter trace) + { + var context = new ParseContext(expression, trace, namedValues: null, functions: null, allowUnknownKeywords: true, allowKeywordHyphens: m_parserOptions.AllowHyphens); + context.Trace.Info($"Validating expression syntax: <{expression}>"); + CreateTree(context); + } + + private static IExpressionNode CreateTree(ParseContext context) + { + while (TryGetNextToken(context)) + { + switch (context.Token.Kind) + { + // Punctuation + case TokenKind.StartIndex: + HandleStartIndex(context); + break; + case TokenKind.EndIndex: + HandleEndIndex(context); + break; + case TokenKind.EndParameter: + HandleEndParameter(context); + break; + case TokenKind.Separator: + HandleSeparator(context); + break; + case TokenKind.Dereference: + HandleDereference(context); + break; + case TokenKind.Wildcard: + HandleWildcard(context); + break; + + // Functions + case TokenKind.WellKnownFunction: + case TokenKind.ExtensionFunction: + HandleFunction(context); + break; + + // Leaf values + case TokenKind.Boolean: + case TokenKind.Number: + case TokenKind.Version: + case TokenKind.String: + case TokenKind.ExtensionNamedValue: + HandleValue(context); + break; + + // Unknown keyword + case TokenKind.UnknownKeyword: + HandleUnknownKeyword(context); + break; + + // Malformed + case TokenKind.Unrecognized: + throw new ParseException(ParseExceptionKind.UnrecognizedValue, context.Token, context.Expression); + + // Unexpected + case TokenKind.PropertyName: // PropertyName should never reach here (HandleDereference reads next token). + case TokenKind.StartParameter: // StartParameter is only expected by HandleFunction. + default: + throw new ParseException(ParseExceptionKind.UnexpectedSymbol, context.Token, context.Expression); + } + + // Validate depth. + if (context.Containers.Count >= ExpressionConstants.MaxDepth) + { + throw new ParseException(ParseExceptionKind.ExceededMaxDepth, token: null, expression: context.Expression); + } + } + + // Validate all containers were closed. + if (context.Containers.Count > 0) + { + ContainerInfo container = context.Containers.Peek(); + if (container.Node is FunctionNode) + { + throw new ParseException(ParseExceptionKind.UnclosedFunction, container.Token, context.Expression); + } + else + { + throw new ParseException(ParseExceptionKind.UnclosedIndexer, container.Token, context.Expression); + } + } + + return context.Root; + } + + private static bool TryGetNextToken(ParseContext context) + { + context.LastToken = context.Token; + if (context.Lexer.TryGetNextToken(ref context.Token)) + { + // Adjust indent level. 
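// --- Illustrative aside (not part of the patch) ---------------------------------------
// ValidateSyntax above reuses the same parse loop but with allowUnknownKeywords set, so an
// expression can be vetted before any extension named values or functions are registered.
// A minimal sketch; the expression text is made up, and this assumes ParseException derives
// from the public ExpressionException shown earlier in this patch:
using System;
using GitHub.DistributedTask.Expressions;

static class ValidateSyntaxSketch
{
    public static void Main()
    {
        var parser = new ExpressionParser();
        try
        {
            // "github" is an unknown keyword here; ValidateSyntax tolerates it.
            parser.ValidateSyntax("startsWith(github.ref, 'refs/tags/')", trace: null);
            Console.WriteLine("syntax ok");
        }
        catch (ExpressionException ex)
        {
            Console.WriteLine($"invalid expression: {ex.Message}");
        }
    }
}
// ---------------------------------------------------------------------------------------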
+ int indentLevel = context.Containers.Count; + if (indentLevel > 0) + { + switch (context.Token.Kind) + { + case TokenKind.StartParameter: + case TokenKind.EndParameter: + case TokenKind.EndIndex: + indentLevel--; + break; + } + } + + String indent = String.Empty.PadRight(indentLevel * 2, '.'); + switch (context.Token.Kind) + { + // Literal values + case TokenKind.Boolean: + context.Trace.Verbose($"{indent}{ExpressionUtil.FormatValue(null, context.Token.ParsedValue, ValueKind.Boolean)}"); + break; + case TokenKind.Number: + context.Trace.Verbose($"{indent}{ExpressionUtil.FormatValue(null, context.Token.ParsedValue, ValueKind.Number)}"); + break; + case TokenKind.Version: + context.Trace.Verbose($"{indent}{ExpressionUtil.FormatValue(null, context.Token.ParsedValue, ValueKind.Version)}"); + break; + case TokenKind.String: + context.Trace.Verbose($"{indent}{ExpressionUtil.FormatValue(null, context.Token.ParsedValue, ValueKind.String)}"); + break; + // Property or unrecognized + case TokenKind.PropertyName: + case TokenKind.Unrecognized: + context.Trace.Verbose($"{indent}{context.Token.Kind} {ExpressionUtil.FormatValue(null, context.Token.RawValue, ValueKind.String)}"); + break; + // Function or punctuation + case TokenKind.WellKnownFunction: + case TokenKind.ExtensionFunction: + case TokenKind.ExtensionNamedValue: + case TokenKind.Wildcard: + case TokenKind.UnknownKeyword: + case TokenKind.StartIndex: + case TokenKind.StartParameter: + case TokenKind.EndIndex: + case TokenKind.EndParameter: + case TokenKind.Separator: + case TokenKind.Dereference: + context.Trace.Verbose($"{indent}{context.Token.RawValue}"); + break; + default: // Should never reach here. + throw new NotSupportedException($"Unexpected token kind: {context.Token.Kind}"); + } + + return true; + } + + return false; + } + + private static void HandleStartIndex(ParseContext context) + { + // Validate follows ")", "]", "*", or a property name. + if (context.LastToken == null || + (context.LastToken.Kind != TokenKind.EndParameter && context.LastToken.Kind != TokenKind.EndIndex && context.LastToken.Kind != TokenKind.PropertyName && context.LastToken.Kind != TokenKind.ExtensionNamedValue && context.LastToken.Kind != TokenKind.UnknownKeyword && context.LastToken.Kind != TokenKind.Wildcard)) + { + throw new ParseException(ParseExceptionKind.UnexpectedSymbol, context.Token, context.Expression); + } + + // Wrap the object being indexed into. + var indexer = new IndexerNode(); + ExpressionNode obj = null; + if (context.Containers.Count > 0) + { + ContainerNode container = context.Containers.Peek().Node; + Int32 objIndex = container.Parameters.Count - 1; + obj = container.Parameters[objIndex]; + container.ReplaceParameter(objIndex, indexer); + } + else + { + obj = context.Root; + context.Root = indexer; + } + + indexer.AddParameter(obj); + + // Update the container stack. + context.Containers.Push(new ContainerInfo() { Node = indexer, Token = context.Token }); + } + + private static void HandleDereference(ParseContext context) + { + // Validate follows ")", "]", "*", or a property name. 
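// --- Illustrative aside (not part of the patch) ---------------------------------------
// Putting the pieces together: CreateTree drives this token loop to build the node tree,
// and the root node's Evaluate entry points (ExpressionNode, earlier) realize it. A minimal
// end-to-end sketch using only well-known functions and no trace writer or secret masker:
using System;
using GitHub.DistributedTask.Expressions;

static class EndToEndSketch
{
    public static void Main()
    {
        IExpressionNode tree = new ExpressionParser().CreateTree(
            "and(eq(1, 1), startsWith('refs/heads/main', 'refs/heads/'))",
            trace: null,
            namedValues: null,
            functions: null);

        // Prints: True
        Console.WriteLine(tree.EvaluateBoolean(trace: null, secretMasker: null, state: null));
    }
}
// ---------------------------------------------------------------------------------------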
+ if (context.LastToken == null || + (context.LastToken.Kind != TokenKind.EndParameter && context.LastToken.Kind != TokenKind.EndIndex && context.LastToken.Kind != TokenKind.PropertyName && context.LastToken.Kind != TokenKind.ExtensionNamedValue && context.LastToken.Kind != TokenKind.UnknownKeyword && context.LastToken.Kind != TokenKind.Wildcard)) + { + throw new ParseException(ParseExceptionKind.UnexpectedSymbol, context.Token, context.Expression); + } + + // Wrap the object being indexed into. + var indexer = new IndexerNode(); + ExpressionNode obj = null; + if (context.Containers.Count > 0) + { + ContainerNode container = context.Containers.Peek().Node; + Int32 objIndex = container.Parameters.Count - 1; + obj = container.Parameters[objIndex]; + container.ReplaceParameter(objIndex, indexer); + } + else + { + obj = context.Root; + context.Root = indexer; + } + + indexer.AddParameter(obj); + + // Validate a token follows. + if (!TryGetNextToken(context)) + { + throw new ParseException(ParseExceptionKind.ExpectedPropertyName, context.LastToken, context.Expression); + } + + if (context.Token.Kind == TokenKind.PropertyName) + { + indexer.AddParameter(new LiteralValueNode(context.Token.RawValue)); + } + else if (context.Token.Kind == TokenKind.Wildcard) + { + // For a wildcard we add a third parameter, a boolean set to true, so that we know it's a wildcard. + indexer.AddParameter(new LiteralValueNode(context.Token.RawValue)); + indexer.AddParameter(new LiteralValueNode(true)); + } + else + { + throw new ParseException(ParseExceptionKind.UnexpectedSymbol, context.Token, context.Expression); + } + } + + private static void HandleWildcard(ParseContext context) + { + // Validate follows "[". + if (context.LastToken == null || + context.LastToken.Kind != TokenKind.StartIndex) + { + throw new ParseException(ParseExceptionKind.UnexpectedSymbol, context.Token, context.Expression); + } + + // When we have a wildcard, we add the wildcard and also third boolean parameter set to true. + // This lets us differentiate downstream from '*'. + context.Containers.Peek().Node.AddParameter(new LiteralValueNode(context.Token.RawValue)); + context.Containers.Peek().Node.AddParameter(new LiteralValueNode(true)); + } + + private static void HandleEndParameter(ParseContext context) + { + ContainerInfo container = context.Containers.Count > 0 ? context.Containers.Peek() : null; // Validate: + if (container == null || // 1) Container is not null + !(container.Node is FunctionNode) || // 2) Container is a function + container.Node.Parameters.Count < GetMinParamCount(context, container.Token) || // 3) Not below min param threshold + container.Node.Parameters.Count > GetMaxParamCount(context, container.Token) || // 4) Not above max param threshold + context.LastToken.Kind == TokenKind.Separator) // 5) Last token is not a separator + { + throw new ParseException(ParseExceptionKind.UnexpectedSymbol, context.Token, context.Expression); + } + + context.Containers.Pop(); + } + + private static void HandleEndIndex(ParseContext context) + { + IndexerNode indexer = context.Containers.Count > 0 ? context.Containers.Peek().Node as IndexerNode : null; + // // Validate: + if (indexer == null || // 1) Container is an indexer + !(indexer.Parameters.Count == 2 || indexer.Parameters.Count == 3)) // 2) Can be 2 or 3 parameters. It's 3 parameters when we are using a filtered array since we + // set a boolean along with the wildcard. 
+ { + throw new ParseException(ParseExceptionKind.UnexpectedSymbol, context.Token, context.Expression); + } + + context.Containers.Pop(); + } + + private static void HandleUnknownKeyword(ParseContext context) + { + // Validate. + if (!context.AllowUnknownKeywords) + { + throw new ParseException(ParseExceptionKind.UnrecognizedValue, context.Token, context.Expression); + } + + // Try handle function. + if (HandleFunction(context, bestEffort: true)) + { + return; + } + + // Handle named value. + HandleValue(context); + } + + private static void HandleValue(ParseContext context) + { + // Validate either A) is the first token OR B) follows "[" "(" or ",". + if (context.LastToken != null && + context.LastToken.Kind != TokenKind.StartIndex && + context.LastToken.Kind != TokenKind.StartParameter && + context.LastToken.Kind != TokenKind.Separator) + { + throw new ParseException(ParseExceptionKind.UnexpectedSymbol, context.Token, context.Expression); + } + + // Create the node. + ExpressionNode node; + switch (context.Token.Kind) + { + case TokenKind.ExtensionNamedValue: + String name = context.Token.RawValue; + node = context.ExtensionNamedValues[name].CreateNode(); + node.Name = name; + break; + case TokenKind.UnknownKeyword: + node = new UnknownNamedValueNode(); + node.Name = context.Token.RawValue; + break; + default: + node = new LiteralValueNode(context.Token.ParsedValue); + break; + } + + // Update the tree. + if (context.Root == null) + { + context.Root = node; + } + else + { + context.Containers.Peek().Node.AddParameter(node); + } + } + + private static void HandleSeparator(ParseContext context) + { + ContainerInfo container = context.Containers.Count > 0 ? context.Containers.Peek() : null; // Validate: + if (container == null || // 1) Container is not null + !(container.Node is FunctionNode) || // 2) Container is a function + container.Node.Parameters.Count < 1 || // 3) At least one parameter + container.Node.Parameters.Count >= GetMaxParamCount(context, container.Token) ||// 4) Under max parameters threshold + context.LastToken.Kind == TokenKind.Separator) // 5) Last token is not a separator + { + throw new ParseException(ParseExceptionKind.UnexpectedSymbol, context.Token, context.Expression); + } + } + + private static Boolean HandleFunction( + ParseContext context, + Boolean bestEffort = false) + { + // Validate either A) is first token OR B) follows "," or "[" or "(". + if (context.LastToken != null && + (context.LastToken.Kind != TokenKind.Separator && + context.LastToken.Kind != TokenKind.StartIndex && + context.LastToken.Kind != TokenKind.StartParameter)) + { + if (bestEffort) + { + return false; + } + + throw new ParseException(ParseExceptionKind.UnexpectedSymbol, context.Token, context.Expression); + } + + // Validate '(' follows. + if (bestEffort) + { + Token nextToken = null; + if (!context.Lexer.TryPeekNextToken(ref nextToken) || nextToken.Kind != TokenKind.StartParameter) + { + return false; + } + + TryGetNextToken(context); + } + else if (!TryGetNextToken(context) || context.Token.Kind != TokenKind.StartParameter) + { + throw new ParseException(ParseExceptionKind.ExpectedStartParameter, context.LastToken, context.Expression); + } + + // Create the node. 
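// --- Illustrative aside (not part of the patch) ---------------------------------------
// Extension functions reach the node-creation switch below through the functions argument
// of CreateTree, wrapped in FunctionInfo<T> (defined later in this patch). A minimal sketch
// that registers the hypothetical IsMultipleNode from the earlier aside under the made-up
// name "isMultiple":
using System;
using GitHub.DistributedTask.Expressions;

static class ExtensionFunctionSketch
{
    public static void Main()
    {
        IExpressionNode tree = new ExpressionParser().CreateTree(
            "isMultiple(12, 4)",
            trace: null,
            namedValues: null,
            functions: new IFunctionInfo[] { new FunctionInfo<IsMultipleNode>("isMultiple", minParameters: 2, maxParameters: 2) });

        // Prints: True
        Console.WriteLine(tree.EvaluateBoolean(trace: null, secretMasker: null, state: null));
    }
}
// ---------------------------------------------------------------------------------------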
+ FunctionNode node; + String name = context.LastToken.RawValue; + switch (context.LastToken.Kind) + { + case TokenKind.WellKnownFunction: + node = ExpressionConstants.WellKnownFunctions[name].CreateNode(); + node.Name = name; + break; + case TokenKind.ExtensionFunction: + node = context.ExtensionFunctions[name].CreateNode(); + node.Name = name; + break; + case TokenKind.UnknownKeyword: + node = new UnknownFunctionNode(); + node.Name = name; + break; + default: + // Should never reach here. + throw new NotSupportedException($"Unexpected function token name: '{context.LastToken.Kind}'"); + } + + // Update the tree. + if (context.Root == null) + { + context.Root = node; + } + else + { + context.Containers.Peek().Node.AddParameter(node); + } + + // Update the container stack. + context.Containers.Push(new ContainerInfo() { Node = node, Token = context.LastToken }); + return true; + } + + private static int GetMinParamCount( + ParseContext context, + Token token) + { + switch (token.Kind) + { + case TokenKind.WellKnownFunction: + return ExpressionConstants.WellKnownFunctions[token.RawValue].MinParameters; + case TokenKind.ExtensionFunction: + return context.ExtensionFunctions[token.RawValue].MinParameters; + case TokenKind.UnknownKeyword: + return 0; + default: // Should never reach here. + throw new NotSupportedException($"Unexpected token kind '{token.Kind}'. Unable to determine min param count."); + } + } + + private static Int32 GetMaxParamCount( + ParseContext context, + Token token) + { + switch (token.Kind) + { + case TokenKind.WellKnownFunction: + return ExpressionConstants.WellKnownFunctions[token.RawValue].MaxParameters; + case TokenKind.ExtensionFunction: + return context.ExtensionFunctions[token.RawValue].MaxParameters; + case TokenKind.UnknownKeyword: + return Int32.MaxValue; + default: // Should never reach here. + throw new NotSupportedException($"Unexpected token kind '{token.Kind}'. Unable to determine max param count."); + } + } + + private ExpressionParserOptions m_parserOptions; + + private sealed class ContainerInfo + { + public ContainerNode Node { get; set; } + + public Token Token { get; set; } + } + + private sealed class ParseContext + { + public readonly Boolean AllowUnknownKeywords; + public readonly Stack Containers = new Stack(); + public readonly String Expression; + public readonly Dictionary ExtensionFunctions = new Dictionary(StringComparer.OrdinalIgnoreCase); + public readonly Dictionary ExtensionNamedValues = new Dictionary(StringComparer.OrdinalIgnoreCase); + public readonly LexicalAnalyzer Lexer; + public readonly ITraceWriter Trace; + public Token Token; + public Token LastToken; + public ExpressionNode Root; + + public ParseContext( + String expression, + ITraceWriter trace, + IEnumerable namedValues, + IEnumerable functions, + Boolean allowUnknownKeywords = false, + Boolean allowKeywordHyphens = false) + { + Expression = expression ?? String.Empty; + if (Expression.Length > ExpressionConstants.MaxLength) + { + throw new ParseException(ParseExceptionKind.ExceededMaxLength, token: null, expression: Expression); + } + + Trace = trace ?? new NoOperationTraceWriter(); + foreach (INamedValueInfo namedValueInfo in (namedValues ?? new INamedValueInfo[0])) + { + ExtensionNamedValues.Add(namedValueInfo.Name, namedValueInfo); + } + + foreach (IFunctionInfo functionInfo in (functions ?? 
new IFunctionInfo[0])) + { + ExtensionFunctions.Add(functionInfo.Name, functionInfo); + } + + AllowUnknownKeywords = allowUnknownKeywords; + Lexer = new LexicalAnalyzer(Expression, namedValues: ExtensionNamedValues.Keys, functions: ExtensionFunctions.Keys, allowKeywordHyphens); + } + + private class NoOperationTraceWriter : ITraceWriter + { + public void Info(String message) + { + } + + public void Verbose(String message) + { + } + } + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ExpressionParserOptions.cs b/src/Sdk/DTExpressions/Expressions/ExpressionParserOptions.cs new file mode 100644 index 00000000000..49e267e82d5 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ExpressionParserOptions.cs @@ -0,0 +1,13 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + public sealed class ExpressionParserOptions + { + public Boolean AllowHyphens + { + get; + set; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ExpressionUtil.cs b/src/Sdk/DTExpressions/Expressions/ExpressionUtil.cs new file mode 100644 index 00000000000..ffb8219d595 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ExpressionUtil.cs @@ -0,0 +1,211 @@ +using System; +using System.ComponentModel; +using System.Globalization; +using System.Reflection; +using GitHub.DistributedTask.Logging; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + internal static class ExpressionUtil + { + internal static Object ConvertToCanonicalValue( + EvaluationOptions options, + Object val, + out ValueKind kind, + out Object raw, + out ResultMemory conversionResultMemory) + { + // Apply converter + if (options?.Converters?.Count > 0 && + !Object.ReferenceEquals(val, null) && + options.Converters.TryGetValue(val.GetType(), out Converter convert)) + { + raw = val; + var conversionResult = convert(val); + val = conversionResult.Result; + conversionResultMemory = conversionResult.ResultMemory; + } + else + { + raw = null; + conversionResultMemory = null; + } + + if (Object.ReferenceEquals(val, null)) + { + kind = ValueKind.Null; + return null; + } + else if (val is IString str) + { + kind = ValueKind.String; + return str.GetString(); + } + else if (val is IBoolean booleanValue) + { + kind = ValueKind.Boolean; + return booleanValue.GetBoolean(); + } + else if (val is INumber num) + { + kind = ValueKind.Number; + return num.GetNumber(); + } + else if (val is JToken) + { + var jtoken = val as JToken; + switch (jtoken.Type) + { + case JTokenType.Array: + kind = ValueKind.Array; + return jtoken; + case JTokenType.Boolean: + kind = ValueKind.Boolean; + return jtoken.ToObject(); + case JTokenType.Float: + case JTokenType.Integer: + kind = ValueKind.Number; + // todo: test the extents of the conversion + return jtoken.ToObject(); + case JTokenType.Null: + kind = ValueKind.Null; + return null; + case JTokenType.Object: + kind = ValueKind.Object; + return jtoken; + case JTokenType.String: + kind = ValueKind.String; + return jtoken.ToObject(); + } + } + else if (val is String) + { + kind = ValueKind.String; + return val; + } + else if (val is Version) + { + kind = ValueKind.Version; + return val; + } + else if (!val.GetType().GetTypeInfo().IsClass) + { + if (val is Boolean) + { + kind = ValueKind.Boolean; + return val; + } + else if (val is DateTimeOffset) + { + kind = ValueKind.DateTime; + return val; + } + else if (val is DateTime dateTime) + { + kind = ValueKind.DateTime; + switch (dateTime.Kind) + { + // When Local: convert to preferred time 
zone + case DateTimeKind.Local: + var targetTimeZone = options?.TimeZone ?? TimeZoneInfo.Local; + var localDateTimeOffset = new DateTimeOffset(dateTime); + return TimeZoneInfo.ConvertTime(localDateTimeOffset, targetTimeZone); + // When Unspecified: assume preferred time zone + case DateTimeKind.Unspecified: + var timeZone = options?.TimeZone ?? TimeZoneInfo.Local; + var offset = timeZone.GetUtcOffset(dateTime); + return new DateTimeOffset(dateTime, offset); + // When UTC: keep UTC + case DateTimeKind.Utc: + return new DateTimeOffset(dateTime); + default: + throw new NotSupportedException($"Unexpected DateTimeKind '{dateTime.Kind}'"); // Should never happen + } + } + else if (val is Decimal || val is Byte || val is SByte || val is Int16 || val is UInt16 || val is Int32 || val is UInt32 || val is Int64 || val is UInt64 || val is Single || val is Double) + { + kind = ValueKind.Number; + return Convert.ToDecimal(val); + } + else if (val is Enum) + { + var strVal = String.Format(CultureInfo.InvariantCulture, "{0:G}", val); + if (Decimal.TryParse(strVal, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out Decimal decVal)) + { + kind = ValueKind.Number; + return decVal; + } + + kind = ValueKind.String; + return strVal; + } + } + + kind = ValueKind.Object; + return val; + } + + internal static String FormatValue( + ISecretMasker secretMasker, + EvaluationResult evaluationResult) + { + return FormatValue(secretMasker, evaluationResult.Value, evaluationResult.Kind); + } + + internal static String FormatValue( + ISecretMasker secretMasker, + Object value, + ValueKind kind) + { + switch (kind) + { + case ValueKind.Boolean: + return ((Boolean)value).ToString(); + + case ValueKind.DateTime: + var strDateTime = "(DateTime)" + ((DateTimeOffset)value).ToString(ExpressionConstants.DateTimeFormat, CultureInfo.InvariantCulture); + return secretMasker != null ? secretMasker.MaskSecrets(strDateTime) : strDateTime; + + case ValueKind.Number: + var strNumber = ((Decimal)value).ToString(ExpressionConstants.NumberFormat, CultureInfo.InvariantCulture); + return secretMasker != null ? secretMasker.MaskSecrets(strNumber) : strNumber; + + case ValueKind.String: + // Mask secrets before string-escaping. + var strValue = secretMasker != null ? secretMasker.MaskSecrets(value as String) : value as String; + return $"'{StringEscape(strValue)}'"; + + case ValueKind.Version: + String strVersion = secretMasker != null ? secretMasker.MaskSecrets(value.ToString()) : value.ToString(); + return $"v{strVersion}"; + + case ValueKind.Array: + case ValueKind.Null: + case ValueKind.Object: + return kind.ToString(); + + default: // Should never reach here. + throw new NotSupportedException($"Unable to convert to realized expression. Unexpected value kind: {kind}"); + } + } + + internal static Char SafeCharAt( + String str, + Int32 index) + { + if (str.Length > index) + { + return str[index]; + } + + return '\0'; + } + + internal static String StringEscape(String value) + { + return String.IsNullOrEmpty(value) ? 
String.Empty : value.Replace("'", "''"); + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/FormatNode.cs b/src/Sdk/DTExpressions/Expressions/FormatNode.cs new file mode 100644 index 00000000000..19325f4ae7a --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/FormatNode.cs @@ -0,0 +1,394 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Text; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class FormatNode : FunctionNode + { + protected sealed override Object EvaluateCore(EvaluationContext context) + { + var format = Parameters[0].EvaluateString(context); + var index = 0; + var result = new FormatResultBuilder(this, context, CreateMemoryCounter(context)); + while (index < format.Length) + { + var lbrace = format.IndexOf('{', index); + var rbrace = format.IndexOf('}', index); + + // Left brace + if (lbrace >= 0 && (rbrace < 0 || rbrace > lbrace)) + { + // Escaped left brace + if (ExpressionUtil.SafeCharAt(format, lbrace + 1) == '{') + { + result.Append(format.Substring(index, lbrace - index + 1)); + index = lbrace + 2; + } + // Left brace, number, optional format specifiers, right brace + else if (rbrace > lbrace + 1 && + ReadArgIndex(format, lbrace + 1, out Byte argIndex, out Int32 endArgIndex) && + ReadFormatSpecifiers(format, endArgIndex + 1, out String formatSpecifiers, out rbrace)) + { + // Check parameter count + if (argIndex > Parameters.Count - 2) + { + throw new FormatException(ExpressionResources.InvalidFormatArgIndex(format)); + } + + // Append the portion before the left brace + if (lbrace > index) + { + result.Append(format.Substring(index, lbrace - index)); + } + + // Append the arg + result.Append(argIndex, formatSpecifiers); + index = rbrace + 1; + } + else + { + throw new FormatException(ExpressionResources.InvalidFormatString(format)); + } + } + // Right brace + else if (rbrace >= 0) + { + // Escaped right brace + if (ExpressionUtil.SafeCharAt(format, rbrace + 1) == '}') + { + result.Append(format.Substring(index, rbrace - index + 1)); + index = rbrace + 2; + } + else + { + throw new FormatException(ExpressionResources.InvalidFormatString(format)); + } + } + // Last segment + else + { + result.Append(format.Substring(index)); + break; + } + } + + return result.ToString(); + } + + private Boolean ReadArgIndex( + String str, + Int32 startIndex, + out Byte result, + out Int32 endIndex) + { + // Count the number of digits + var length = 0; + while (Char.IsDigit(ExpressionUtil.SafeCharAt(str, startIndex + length))) + { + length++; + } + + // Validate at least one digit + if (length < 1) + { + result = default; + endIndex = default; + return false; + } + + // Parse the number + endIndex = startIndex + length - 1; + return Byte.TryParse(str.Substring(startIndex, length), NumberStyles.None, CultureInfo.InvariantCulture, out result); + } + + private Boolean ReadFormatSpecifiers( + String str, + Int32 startIndex, + out String result, + out Int32 rbrace) + { + // No format specifiers + var c = ExpressionUtil.SafeCharAt(str, startIndex); + if (c == '}') + { + result = String.Empty; + rbrace = startIndex; + return true; + } + + // Validate starts with ":" + if (c != ':') + { + result = default; + rbrace = default; + return false; + } + + // Read the specifiers + var specifiers = new StringBuilder(); + var index = startIndex + 1; + while (true) + { + // Validate not the end of the string + if (index >= str.Length) + { + result = default; + rbrace = default; + return false; + } + + c = 
str[index]; + + // Not right-brace + if (c != '}') + { + specifiers.Append(c); + index++; + } + // Escaped right-brace + else if (ExpressionUtil.SafeCharAt(str, index + 1) == '}') + { + specifiers.Append('}'); + index += 2; + } + // Closing right-brace + else + { + result = specifiers.ToString(); + rbrace = index; + return true; + } + } + } + + private sealed class FormatResultBuilder + { + internal FormatResultBuilder( + FormatNode node, + EvaluationContext context, + MemoryCounter counter) + { + m_node = node; + m_context = context; + m_counter = counter; + m_cache = new ArgValue[node.Parameters.Count - 1]; + } + + // Build the final string. This is when lazy segments are evaluated. + public override String ToString() + { + return String.Join( + String.Empty, + m_segments.Select(obj => + { + if (obj is Lazy lazy) + { + return lazy.Value; + } + else + { + return obj as String; + } + })); + } + + // Append a static value + internal void Append(String value) + { + if (value?.Length > 0) + { + // Track memory + m_counter.Add(value); + + // Append the segment + m_segments.Add(value); + } + } + + // Append an argument + internal void Append( + Int32 argIndex, + String formatSpecifiers) + { + // Delay execution until the final ToString + m_segments.Add(new Lazy(() => + { + String result; + + // Get the arg from the cache + var argValue = m_cache[argIndex]; + + // Evaluate the arg and cache the result + if (argValue == null) + { + // The evaluation result is required when format specifiers are used. Otherwise the string + // result is required. Go ahead and store both values. Since ConvertToString produces tracing, + // we need to run that now so the tracing appears in order in the log. + var evaluationResult = m_node.Parameters[argIndex + 1].Evaluate(m_context); + var stringResult = evaluationResult.ConvertToString(m_context); + argValue = new ArgValue(evaluationResult, stringResult); + m_cache[argIndex] = argValue; + } + + // No format specifiers + if (String.IsNullOrEmpty(formatSpecifiers)) + { + result = argValue.StringResult; + } + // DateTime + else if (argValue.EvaluationResult.Kind == ValueKind.DateTime) + { + result = FormatDateTime((DateTimeOffset)argValue.EvaluationResult.Value, formatSpecifiers); + } + // Invalid + else + { + throw new FormatException(ExpressionResources.InvalidFormatSpecifiers(formatSpecifiers, argValue.EvaluationResult.Kind)); + } + + // Track memory + if (!String.IsNullOrEmpty(result)) + { + m_counter.Add(result); + } + + return result; + })); + } + + private String FormatDateTime( + DateTimeOffset dateTime, + String specifiers) + { + var result = new StringBuilder(); + var i = 0; + while (true) + { + // Get the next specifier + var specifier = GetNextSpecifier(specifiers, ref i); + + // Check end of string + if (String.IsNullOrEmpty(specifier)) + { + break; + } + + // Append the value + switch (specifier) + { + case "yyyy": + case "yy": + case "MM": + case "dd": + case "HH": + case "mm": + case "ss": + case "ff": + case "fff": + case "ffff": + case "fffff": + case "ffffff": + case "fffffff": + case "zzz": + result.Append(dateTime.ToString(specifier)); + break; + + // .Net requires a leading % for some specifiers + case "M": + case "d": + case "H": + case "m": + case "s": + case "f": + case "K": + result.Append(dateTime.ToString("%" + specifier)); + break; + + default: + // Escaped character + if (specifier[0] == '\\') + { + result.Append(specifier[1]); + } + else if (specifier[0] == ' ') + { + result.Append(specifier); + } + // Unexpected + else + { + throw new 
FormatException(ExpressionResources.InvalidFormatSpecifiers(specifiers, ValueKind.DateTime)); + } + break; + } + } + + return result.ToString(); + } + + private String GetNextSpecifier( + String specifiers, + ref Int32 index) + { + // End of string + if (index >= specifiers.Length) + { + return String.Empty; + } + + // Get the first char + var startIndex = index; + var c = specifiers[index++]; + + // Escaped + if (c == '\\') + { + // End of string + if (index >= specifiers.Length) + { + throw new FormatException(ExpressionResources.InvalidFormatSpecifiers(specifiers, ValueKind.DateTime)); + } + + index++; + } + // Find consecutive matches + else + { + while (index < specifiers.Length && specifiers[index] == c) + { + index++; + } + } + + return specifiers.Substring(startIndex, index - startIndex); + } + + private readonly ArgValue[] m_cache; + private readonly EvaluationContext m_context; + private readonly MemoryCounter m_counter; + private readonly FormatNode m_node; + private readonly List m_segments = new List(); + } + + /// + /// Stores an EvaluateResult and the value converted to a String. + /// + private sealed class ArgValue + { + public ArgValue( + EvaluationResult evaluationResult, + String stringResult) + { + EvaluationResult = evaluationResult; + StringResult = stringResult; + } + + public EvaluationResult EvaluationResult { get; } + + public String StringResult { get; } + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/FunctionInfo.cs b/src/Sdk/DTExpressions/Expressions/FunctionInfo.cs new file mode 100644 index 00000000000..6852ea977bb --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/FunctionInfo.cs @@ -0,0 +1,26 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + public class FunctionInfo : IFunctionInfo + where T : FunctionNode, new() + { + public FunctionInfo(String name, Int32 minParameters, Int32 maxParameters) + { + Name = name; + MinParameters = minParameters; + MaxParameters = maxParameters; + } + + public String Name { get; } + + public Int32 MinParameters { get; } + + public Int32 MaxParameters { get; } + + public FunctionNode CreateNode() + { + return new T(); + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/FunctionNode.cs b/src/Sdk/DTExpressions/Expressions/FunctionNode.cs new file mode 100644 index 00000000000..6722f36a34f --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/FunctionNode.cs @@ -0,0 +1,45 @@ +using System; +using System.ComponentModel; +using System.Globalization; +using System.Linq; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class FunctionNode : ContainerNode + { + /// + /// Generally this should not be overridden. True indicates the result of the node is traced as part of the "expanded" + /// (i.e. "realized") trace information. Otherwise the node expression is printed, and parameters to the node may or + /// may not be fully realized - depending on each respective parameter's trace-fully-realized setting. + /// + /// The purpose is so the end user can understand how their expression expanded at run time. For example, consider + /// the expression: eq(variables.publish, 'true'). 
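// --- Illustrative aside (not part of the patch) ---------------------------------------
// The FormatNode implementation earlier in this patch resolves "{N}" placeholders by
// position, doubles braces to escape them, and only applies ":" format specifiers to
// DateTime values. A small sketch of the string-only cases (the expressions are made up):
using System;
using GitHub.DistributedTask.Expressions;

static class FormatFunctionSketch
{
    public static void Main()
    {
        var parser = new ExpressionParser();

        var counting = parser.CreateTree("format('Run {0} of {1}', 3, 10)", trace: null, namedValues: null, functions: null);
        // Prints: Run 3 of 10
        Console.WriteLine(counting.Evaluate<String>(trace: null, secretMasker: null, state: null));

        var escaped = parser.CreateTree("format('{{0}} is literal, {0} is not', 'x')", trace: null, namedValues: null, functions: null);
        // Prints: {0} is literal, x is not
        Console.WriteLine(escaped.Evaluate<String>(trace: null, secretMasker: null, state: null));
    }
}
// ---------------------------------------------------------------------------------------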
The runtime-expanded expression may be: eq('true', 'true') + /// + protected override Boolean TraceFullyRealized => true; + + internal sealed override String ConvertToExpression() + { + return String.Format( + CultureInfo.InvariantCulture, + "{0}({1})", + Name, + String.Join(", ", Parameters.Select(x => x.ConvertToExpression()))); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return String.Format( + CultureInfo.InvariantCulture, + "{0}({1})", + Name, + String.Join(", ", Parameters.Select(x => x.ConvertToRealizedExpression(context)))); + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/GreaterThanNode.cs b/src/Sdk/DTExpressions/Expressions/GreaterThanNode.cs new file mode 100644 index 00000000000..2c101b76674 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/GreaterThanNode.cs @@ -0,0 +1,14 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class GreaterThanNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + return Parameters[0].Evaluate(context).CompareTo(context, Parameters[1].Evaluate(context)) > 0; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/GreaterThanOrEqualNode.cs b/src/Sdk/DTExpressions/Expressions/GreaterThanOrEqualNode.cs new file mode 100644 index 00000000000..85116ed678d --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/GreaterThanOrEqualNode.cs @@ -0,0 +1,14 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class GreaterThanOrEqualNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + return Parameters[0].Evaluate(context).CompareTo(context, Parameters[1].Evaluate(context)) >= 0; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/IBoolean.cs b/src/Sdk/DTExpressions/Expressions/IBoolean.cs new file mode 100644 index 00000000000..b5a1fec8b54 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/IBoolean.cs @@ -0,0 +1,12 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IBoolean + { + Boolean GetBoolean(); + } +} diff --git a/src/Sdk/DTExpressions/Expressions/IExpressionNode.cs b/src/Sdk/DTExpressions/Expressions/IExpressionNode.cs new file mode 100644 index 00000000000..52c0d9a9a97 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/IExpressionNode.cs @@ -0,0 +1,70 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Logging; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IExpressionNode + { + /// + /// Evaluates the expression and attempts to cast or deserialize the result to the specified + /// type. The specified type can either be simple type or a JSON-serializable class. Allowed + /// simple types are: Boolean, String, Version, Byte, SByte, Int16, UInt16, Int32, UInt32, + /// Int64, UInt64, Single, Double, or Decimal. 
When a JSON-serializable class is specified, the + /// following rules are applied: If the type of the evaluation result object, is assignable to + /// the specified type, then the result will be cast and returned. If the evaluation result + /// object is a String, it will be deserialized as the specified type. If the evaluation result + /// object is null, null will be returned. + /// + /// Optional trace writer + /// Optional secret masker + /// State object for custom evaluation function nodes and custom named-value nodes + T Evaluate( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state, + EvaluationOptions options = null); + + /// + /// Evaluates the expression and returns the result. + /// + /// Optional trace writer + /// Optional secret masker + /// State object for custom evaluation function nodes and custom named-value nodes + Object Evaluate( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state, + EvaluationOptions options = null); + + /// + /// Evaluates the expression and casts the result to a Boolean. + /// + /// Optional trace writer + /// Optional secret masker + /// State object for custom evaluation function nodes and custom named-value nodes + Boolean EvaluateBoolean( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state); + + IEnumerable GetParameters() where T : IExpressionNode; + + /// + /// Evaluates the expression and returns the result, wrapped in the SDK helper + /// for converting, comparing, and traversing objects. + /// + /// Optional trace writer + /// Optional secret masker + /// State object for custom evaluation function nodes and custom named-value nodes + /// Evaluation options + [EditorBrowsable(EditorBrowsableState.Never)] + EvaluationResult EvaluateResult( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state, + EvaluationOptions options); + } +} diff --git a/src/Sdk/DTExpressions/Expressions/IFunctionInfo.cs b/src/Sdk/DTExpressions/Expressions/IFunctionInfo.cs new file mode 100644 index 00000000000..0aad42a8c2d --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/IFunctionInfo.cs @@ -0,0 +1,12 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + public interface IFunctionInfo + { + String Name { get; } + Int32 MinParameters { get; } + Int32 MaxParameters { get; } + FunctionNode CreateNode(); + } +} diff --git a/src/Sdk/DTExpressions/Expressions/INamedValueInfo.cs b/src/Sdk/DTExpressions/Expressions/INamedValueInfo.cs new file mode 100644 index 00000000000..94dfcd3c63c --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/INamedValueInfo.cs @@ -0,0 +1,10 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + public interface INamedValueInfo + { + String Name { get; } + NamedValueNode CreateNode(); + } +} diff --git a/src/Sdk/DTExpressions/Expressions/INumber.cs b/src/Sdk/DTExpressions/Expressions/INumber.cs new file mode 100644 index 00000000000..7baa3752966 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/INumber.cs @@ -0,0 +1,12 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface INumber + { + Decimal GetNumber(); + } +} diff --git a/src/Sdk/DTExpressions/Expressions/IReadOnlyArray.cs b/src/Sdk/DTExpressions/Expressions/IReadOnlyArray.cs new file mode 100644 index 00000000000..686e8bc2691 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/IReadOnlyArray.cs @@ -0,0 +1,11 @@ +using System; +using 
System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IReadOnlyArray : IReadOnlyList + { + } +} diff --git a/src/Sdk/DTExpressions/Expressions/IReadOnlyObject.cs b/src/Sdk/DTExpressions/Expressions/IReadOnlyObject.cs new file mode 100644 index 00000000000..757a17c12d0 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/IReadOnlyObject.cs @@ -0,0 +1,11 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IReadOnlyObject : IReadOnlyDictionary + { + } +} diff --git a/src/Sdk/DTExpressions/Expressions/IString.cs b/src/Sdk/DTExpressions/Expressions/IString.cs new file mode 100644 index 00000000000..c7d677ce604 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/IString.cs @@ -0,0 +1,12 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IString + { + String GetString(); + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ITraceWriter.cs b/src/Sdk/DTExpressions/Expressions/ITraceWriter.cs new file mode 100644 index 00000000000..c1b7fa1f79e --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ITraceWriter.cs @@ -0,0 +1,10 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + public interface ITraceWriter + { + void Info(String message); + void Verbose(String message); + } +} diff --git a/src/Sdk/DTExpressions/Expressions/InNode.cs b/src/Sdk/DTExpressions/Expressions/InNode.cs new file mode 100644 index 00000000000..7807e19425a --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/InNode.cs @@ -0,0 +1,24 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class InNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + EvaluationResult left = Parameters[0].Evaluate(context); + for (Int32 i = 1; i < Parameters.Count; i++) + { + EvaluationResult right = Parameters[i].Evaluate(context); + if (left.Equals(context, right)) + { + return true; + } + } + + return false; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/IndexerNode.cs b/src/Sdk/DTExpressions/Expressions/IndexerNode.cs new file mode 100644 index 00000000000..37d8181043e --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/IndexerNode.cs @@ -0,0 +1,452 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Globalization; +using System.Reflection; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json.Serialization; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class IndexerNode : ContainerNode + { + internal IndexerNode() + { + Name = "indexer"; + } + + protected sealed override Boolean TraceFullyRealized => true; + + internal sealed override String ConvertToExpression() + { + return String.Format( + CultureInfo.InvariantCulture, + "{0}[{1}]", + Parameters[0].ConvertToExpression(), + Parameters[1].ConvertToExpression()); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + 
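                // No trace result was stored for this node, so fall back to the unrealized
                // expression text (e.g. something like "github['event']") rather than an
                // evaluated value.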
return ConvertToExpression(); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + EvaluationResult firstParameter = Parameters[0].Evaluate(context); + + if (context.Options.UseCollectionInterfaces) + { + if (!firstParameter.TryGetCollectionInterface(out Object collection)) + { + // Even if we can't get the collection interface, return empty filtered array if it is a wildcard. + if (Parameters.Count > 2) + { + resultMemory = null; + return new FilteredArray(); + } + + resultMemory = null; + return null; + } + + // Handle operating on a filtered array + if (collection is FilteredArray filteredArray) + { + return HandleFilteredArray(context, filteredArray, out resultMemory); + } + // Handle operating on an object + else if (collection is IReadOnlyObject obj) + { + return HandleObject(context, obj, out resultMemory); + } + // Handle operating on an array + else if (collection is IReadOnlyArray array) + { + return HandleArray(context, array, out resultMemory); + } + + resultMemory = null; + return null; + } + else + { + Object result = null; + + if (firstParameter.Kind == ValueKind.Array && firstParameter.Value is JArray) + { + var jarray = firstParameter.Value as JArray; + EvaluationResult index = Parameters[1].Evaluate(context); + if (index.Kind == ValueKind.Number) + { + Decimal d = (Decimal)index.Value; + if (d >= 0m && d < (Decimal)jarray.Count && d == Math.Floor(d)) + { + result = jarray[(Int32)d]; + } + } + else if (index.Kind == ValueKind.String && !String.IsNullOrEmpty(index.Value as String)) + { + Decimal d; + if (index.TryConvertToNumber(context, out d)) + { + if (d >= 0m && d < (Decimal)jarray.Count && d == Math.Floor(d)) + { + result = jarray[(Int32)d]; + } + } + } + } + else if (firstParameter.Kind == ValueKind.Object) + { + if (firstParameter.Value is JObject) + { + var jobject = firstParameter.Value as JObject; + EvaluationResult index = Parameters[1].Evaluate(context); + String s; + if (index.TryConvertToString(context, out s)) + { + result = jobject[s]; + } + } + else if (firstParameter.Value is IDictionary) + { + var dictionary = firstParameter.Value as IDictionary; + EvaluationResult index = Parameters[1].Evaluate(context); + if (index.TryConvertToString(context, out String key)) + { + if (!dictionary.TryGetValue(key, out String resultString)) + { + result = null; + } + else + { + result = resultString; + } + } + } + else if (firstParameter.Value is IDictionary) + { + var dictionary = firstParameter.Value as IDictionary; + EvaluationResult index = Parameters[1].Evaluate(context); + String s; + if (index.TryConvertToString(context, out s)) + { + if (!dictionary.TryGetValue(s, out result)) + { + result = null; + } + } + } + else if (firstParameter.Value is IReadOnlyDictionary) + { + var dictionary = firstParameter.Value as IReadOnlyDictionary; + EvaluationResult index = Parameters[1].Evaluate(context); + if (index.TryConvertToString(context, out String key)) + { + if (!dictionary.TryGetValue(key, out String resultString)) + { + result = null; + } + else + { + result = resultString; + } + } + } + else if (firstParameter.Value is IReadOnlyDictionary) + { + var dictionary = firstParameter.Value as IReadOnlyDictionary; + EvaluationResult index = Parameters[1].Evaluate(context); + String s; + if (index.TryConvertToString(context, out s)) + { + if (!dictionary.TryGetValue(s, out result)) + { + result = null; + } + } + } + else + { + var contract = 
s_serializer.Value.ContractResolver.ResolveContract(firstParameter.Value.GetType()); + var objectContract = contract as JsonObjectContract; + if (objectContract != null) + { + EvaluationResult index = Parameters[1].Evaluate(context); + if (index.TryConvertToString(context, out String key)) + { + var property = objectContract.Properties.GetClosestMatchProperty(key); + if (property != null) + { + result = objectContract.Properties[property.PropertyName].ValueProvider.GetValue(firstParameter.Value); + } + } + } + else + { + var dictionaryContract = contract as JsonDictionaryContract; + if (dictionaryContract != null && dictionaryContract.DictionaryKeyType == typeof(String)) + { + EvaluationResult index = Parameters[1].Evaluate(context); + if (index.TryConvertToString(context, out String key)) + { + var genericMethod = s_tryGetValueTemplate.Value.MakeGenericMethod(dictionaryContract.DictionaryValueType); + resultMemory = null; + return genericMethod.Invoke(null, new[] { firstParameter.Value, key }); + } + } + } + } + } + + resultMemory = null; + return result; + } + } + + private Object HandleFilteredArray( + EvaluationContext context, + FilteredArray filteredArray, + out ResultMemory resultMemory) + { + EvaluationResult indexResult = Parameters[1].Evaluate(context); + var indexHelper = new IndexHelper(indexResult, context); + + Boolean isFilter; + if (Parameters.Count > 2) + { + isFilter = true; + + if (!String.Equals(indexHelper.StringIndex, ExpressionConstants.Wildcard.ToString(), StringComparison.Ordinal)) + { + throw new InvalidOperationException($"Unexpected filter '{indexHelper.StringIndex}'"); + } + } + else + { + isFilter = false; + } + + var result = new FilteredArray(); + var counter = new MemoryCounter(this, context.Options.MaxMemory); + + foreach (var item in filteredArray) + { + // Leverage the expression SDK to traverse the object + var itemResult = EvaluationResult.CreateIntermediateResult(context, item, out _); + if (itemResult.TryGetCollectionInterface(out Object nestedCollection)) + { + // Apply the index to a child object + if (nestedCollection is IReadOnlyObject nestedObject) + { + if (isFilter) + { + foreach (var val in nestedObject.Values) + { + result.Add(val); + counter.Add(IntPtr.Size); + } + } + else if (indexHelper.HasStringIndex) + { + if (nestedObject.TryGetValue(indexHelper.StringIndex, out Object nestedObjectValue)) + { + result.Add(nestedObjectValue); + counter.Add(IntPtr.Size); + } + } + } + // Apply the index to a child array + else if (nestedCollection is IReadOnlyArray nestedArray) + { + if (isFilter) + { + foreach (var val in nestedArray) + { + result.Add(val); + counter.Add(IntPtr.Size); + } + } + else if (indexHelper.HasIntegerIndex && + indexHelper.IntegerIndex < nestedArray.Count) + { + result.Add(nestedArray[indexHelper.IntegerIndex]); + counter.Add(IntPtr.Size); + } + } + } + } + + resultMemory = new ResultMemory { Bytes = counter.CurrentBytes }; + return result; + } + + private Object HandleObject( + EvaluationContext context, + IReadOnlyObject obj, + out ResultMemory resultMemory) + { + EvaluationResult indexResult = Parameters[1].Evaluate(context); + var indexHelper = new IndexHelper(indexResult, context); + + if (indexHelper.HasStringIndex) + { + Boolean isFilter = Parameters.Count > 2; + + if (isFilter) + { + var filteredArray = new FilteredArray(); + var counter = new MemoryCounter(this, context.Options.MaxMemory); + + foreach (var val in obj.Values) + { + filteredArray.Add(val); + counter.Add(IntPtr.Size); + } + + resultMemory = new 
ResultMemory { Bytes = counter.CurrentBytes }; + return filteredArray; + } + else if (obj.TryGetValue(indexHelper.StringIndex, out Object result)) + { + resultMemory = null; + return result; + } + } + + resultMemory = null; + return null; + } + + private Object HandleArray( + EvaluationContext context, + IReadOnlyArray array, + out ResultMemory resultMemory) + { + // Similar to as above but for an array + EvaluationResult indexResult = Parameters[1].Evaluate(context); + var indexHelper = new IndexHelper(indexResult, context); + + // When we are operating on a array and it has three parameters, with the second being a string * and the third being a true boolean, it's a filtered array. + if (Parameters.Count > 2) + { + var filtered = new FilteredArray(); + var counter = new MemoryCounter(this, context.Options.MaxMemory); + + foreach (var x in array) + { + filtered.Add(x); + counter.Add(IntPtr.Size); + } + + resultMemory = new ResultMemory { Bytes = counter.CurrentBytes }; + return filtered; + } + + if (indexHelper.HasIntegerIndex && indexHelper.IntegerIndex < array.Count) + { + resultMemory = null; + return array[indexHelper.IntegerIndex]; + } + + resultMemory = null; + return null; + } + + // todo: remove with feature flag cleanup for "UseCollectionInterfaces" + private static Object TryGetValue( + IDictionary dictionary, + String key) + { + TValue value; + if (!dictionary.TryGetValue(key, out value)) + { + return null; + } + + return value; + } + + private class FilteredArray : IReadOnlyArray + { + public FilteredArray() + { + m_list = new List(); + } + + public void Add(Object o) + { + m_list.Add(o); + } + + public Int32 Count => m_list.Count; + + public Object this[Int32 index] => m_list[index]; + + public IEnumerator GetEnumerator() => m_list.GetEnumerator(); + + IEnumerator IEnumerable.GetEnumerator() => m_list.GetEnumerator(); + + private readonly IList m_list; + } + + private class IndexHelper + { + public Boolean HasIntegerIndex => m_integerIndex.Value.Item1; + public Int32 IntegerIndex => m_integerIndex.Value.Item2; + + public Boolean HasStringIndex => m_stringIndex.Value.Item1; + public String StringIndex => m_stringIndex.Value.Item2; + + public IndexHelper( + EvaluationResult result, + EvaluationContext context) + { + m_result = result; + m_context = context; + + m_integerIndex = new Lazy>(() => + { + if (m_result.TryConvertToNumber(m_context, out Decimal decimalIndex) && + decimalIndex >= 0m) + { + return new Tuple(true, (Int32)Math.Floor(decimalIndex)); + } + + return new Tuple(false, default(Int32)); + }); + + m_stringIndex = new Lazy>(() => + { + if (m_result.TryConvertToString(m_context, out String stringIndex)) + { + return new Tuple(true, stringIndex); + } + + return new Tuple(false, null); + }); + } + + private Lazy> m_integerIndex; + private Lazy> m_stringIndex; + + private readonly EvaluationResult m_result; + private readonly EvaluationContext m_context; + } + + // todo: remove these properties with feature flag cleanup for "UseCollectionInterfaces" + private static Lazy s_serializer = new Lazy(() => JsonUtility.CreateJsonSerializer()); + private static Lazy s_tryGetValueTemplate = new Lazy(() => typeof(IndexerNode).GetTypeInfo().GetMethod(nameof(TryGetValue), BindingFlags.NonPublic | BindingFlags.Static)); + } +} diff --git a/src/Sdk/DTExpressions/Expressions/JoinNode.cs b/src/Sdk/DTExpressions/Expressions/JoinNode.cs new file mode 100644 index 00000000000..cdef727d76b --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/JoinNode.cs @@ -0,0 +1,69 @@ +using System; 
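// Illustrative sketch of the behavior implemented by JoinNode below (the example values are
// assumptions for illustration, not taken from the patch): Parameters[0] is evaluated as the
// separator and Parameters[1] as the items.
//
//   join(', ', array)    // where array is ['a', 'b', 'c']          -> "a, b, c"
//   join('-', 'solo')    // a non-array value convertible to string -> "solo"
//
// If the second argument is neither a collection nor convertible to a string, the result is
// an empty string, matching the implementation that follows.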
+using System.Text; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class JoinNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => true; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + var items = Parameters[1].Evaluate(context); + + if (items.TryGetCollectionInterface(out var collection) && collection is IReadOnlyArray array) + { + if (array.Count > 0) + { + var result = new StringBuilder(); + var memory = new MemoryCounter(this, context.Options.MaxMemory); + + // Append the first item + var item = array[0]; + var itemResult = EvaluationResult.CreateIntermediateResult(context, item, out _); + if (itemResult.TryConvertToString(context, out String itemString)) + { + memory.Add(itemString); + result.Append(itemString); + } + + // More items? + if (array.Count > 1) + { + var separator = Parameters[0].EvaluateString(context); + + for (var i = 1; i < array.Count; i++) + { + // Append the separator + memory.Add(separator); + result.Append(separator); + + // Append the next item + var nextItem = array[i]; + var nextItemResult = EvaluationResult.CreateIntermediateResult(context, nextItem, out _); + if (nextItemResult.TryConvertToString(context, out String nextItemString)) + { + memory.Add(nextItemString); + result.Append(nextItemString); + } + } + } + + return result.ToString(); + } + else + { + return String.Empty; + } + } + else if (items.TryConvertToString(context, out String str)) + { + return str; + } + else + { + return String.Empty; + } + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/LessThanNode.cs b/src/Sdk/DTExpressions/Expressions/LessThanNode.cs new file mode 100644 index 00000000000..08018719e20 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/LessThanNode.cs @@ -0,0 +1,14 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class LessThanNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + return Parameters[0].Evaluate(context).CompareTo(context, Parameters[1].Evaluate(context)) < 0; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/LessThanOrEqualNode.cs b/src/Sdk/DTExpressions/Expressions/LessThanOrEqualNode.cs new file mode 100644 index 00000000000..39367fc22aa --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/LessThanOrEqualNode.cs @@ -0,0 +1,14 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class LessThanOrEqualNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + return Parameters[0].Evaluate(context).CompareTo(context, Parameters[1].Evaluate(context)) <= 0; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/LexicalAnalyzer.cs b/src/Sdk/DTExpressions/Expressions/LexicalAnalyzer.cs new file mode 100644 index 00000000000..a688aac49f1 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/LexicalAnalyzer.cs @@ -0,0 +1,293 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Text; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class LexicalAnalyzer + { + public LexicalAnalyzer(String expression, IEnumerable namedValues, IEnumerable functions, Boolean allowKeywordHyphens) + { + m_expression = expression; + m_extensionNamedValues = new HashSet(namedValues ?? 
new String[0], StringComparer.OrdinalIgnoreCase); + m_extensionFunctions = new HashSet(functions ?? new String[0], StringComparer.OrdinalIgnoreCase); + m_allowKeyHyphens = allowKeywordHyphens; + } + + public Boolean TryGetNextToken(ref Token token) + { + // Skip whitespace. + while (m_index < m_expression.Length && Char.IsWhiteSpace(m_expression[m_index])) + { + m_index++; + } + + // Test end of string. + if (m_index >= m_expression.Length) + { + token = null; + return false; + } + + // Read the first character to determine the type of token. + Char c = m_expression[m_index]; + switch (c) + { + case ExpressionConstants.StartIndex: + token = new Token(TokenKind.StartIndex, c, m_index++); + break; + case ExpressionConstants.StartParameter: + token = new Token(TokenKind.StartParameter, c, m_index++); + break; + case ExpressionConstants.EndIndex: + token = new Token(TokenKind.EndIndex, c, m_index++); + break; + case ExpressionConstants.EndParameter: + token = new Token(TokenKind.EndParameter, c, m_index++); + break; + case ExpressionConstants.Separator: + token = new Token(TokenKind.Separator, c, m_index++); + break; + case ExpressionConstants.Wildcard: + token = new Token(TokenKind.Wildcard, c, m_index++); + break; + case '\'': + token = ReadStringToken(); + break; + default: + if (c == '.') + { + if (m_lastToken == null || + m_lastToken.Kind == TokenKind.Separator || + m_lastToken.Kind == TokenKind.StartIndex || + m_lastToken.Kind == TokenKind.StartParameter) + { + token = ReadNumberOrVersionToken(); + } + else + { + token = new Token(TokenKind.Dereference, c, m_index++); + } + } + else if (c == '-' || (c >= '0' && c <= '9')) + { + token = ReadNumberOrVersionToken(); + } + else + { + token = ReadKeywordToken(m_allowKeyHyphens); + } + + break; + } + + m_lastToken = token; + return true; + } + + public Boolean TryPeekNextToken(ref Token token) + { + // Record the state. + Int32 index = m_index; + Token lastToken = m_lastToken; + + // Get next token. + Boolean result = TryGetNextToken(ref token); + + // Restore the state. + m_index = index; + m_lastToken = lastToken; + + return result; + } + + private Token ReadNumberOrVersionToken() + { + Int32 startIndex = m_index; + Int32 periods = 0; + do + { + if (m_expression[m_index] == '.') + { + periods++; + } + + m_index++; + } + while (m_index < m_expression.Length && (!TestWhitespaceOrPunctuation(m_expression[m_index]) || m_expression[m_index] == '.')); + + Int32 length = m_index - startIndex; + String str = m_expression.Substring(startIndex, length); + if (periods >= 2) + { + Version version; + if (Version.TryParse(str, out version)) + { + return new Token(TokenKind.Version, str, startIndex, version); + } + } + else + { + // Note, NumberStyles.AllowThousands cannot be allowed since comma has special meaning as a token separator. + Decimal d; + if (Decimal.TryParse( + str, + NumberStyles.AllowDecimalPoint | NumberStyles.AllowLeadingSign, + CultureInfo.InvariantCulture, + out d)) + { + return new Token(TokenKind.Number, str, startIndex, d); + } + } + + return new Token(TokenKind.Unrecognized, str, startIndex); + } + + private Token ReadKeywordToken(bool allowHyphen) + { + // Read to the end of the keyword. + Int32 startIndex = m_index; + m_index++; // Skip the first char. It is already known to be the start of the keyword. + while (m_index < m_expression.Length && !TestWhitespaceOrPunctuation(m_expression[m_index])) + { + m_index++; + } + + // Test if valid keyword character sequence. 
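            // For illustration (the exact classification depends on which extension names are
            // registered, so the example below is an assumption): lexing
            // "eq(github.ref, 'refs/heads/main')" would classify "eq" as WellKnownFunction,
            // "github" as ExtensionNamedValue when it is registered as a named value, and
            // "ref" as PropertyName because it follows the "." dereference token handled below.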
+ Int32 length = m_index - startIndex; + String str = m_expression.Substring(startIndex, length); + if (TestKeyword(str, allowHyphen)) + { + // Test if follows property dereference operator. + if (m_lastToken != null && m_lastToken.Kind == TokenKind.Dereference) + { + return new Token(TokenKind.PropertyName, str, startIndex); + } + + // Boolean + if (str.Equals(Boolean.TrueString, StringComparison.OrdinalIgnoreCase)) + { + return new Token(TokenKind.Boolean, str, startIndex, true); + } + else if (str.Equals(Boolean.FalseString, StringComparison.OrdinalIgnoreCase)) + { + return new Token(TokenKind.Boolean, str, startIndex, false); + } + // Well-known function + else if (ExpressionConstants.WellKnownFunctions.ContainsKey(str)) + { + return new Token(TokenKind.WellKnownFunction, str, startIndex); + } + // Extension value + else if (m_extensionNamedValues.Contains(str)) + { + return new Token(TokenKind.ExtensionNamedValue, str, startIndex); + } + // Extension function + else if (m_extensionFunctions.Contains(str)) + { + return new Token(TokenKind.ExtensionFunction, str, startIndex); + } + } + + // Unknown keyword + return new Token(TokenKind.UnknownKeyword, str, startIndex); + } + + private Token ReadStringToken() + { + Int32 startIndex = m_index; + Char c; + Boolean closed = false; + var str = new StringBuilder(); + m_index++; // Skip the leading single-quote. + while (m_index < m_expression.Length) + { + c = m_expression[m_index++]; + if (c == '\'') + { + // End of string. + if (m_index >= m_expression.Length || m_expression[m_index] != '\'') + { + closed = true; + break; + } + + // Escaped single quote. + m_index++; + } + + str.Append(c); + } + + Int32 length = m_index - startIndex; + String rawValue = m_expression.Substring(startIndex, length); + if (closed) + { + return new Token(TokenKind.String, rawValue, startIndex, str.ToString()); + } + + return new Token(TokenKind.Unrecognized, rawValue, startIndex); + } + + private static Boolean TestKeyword(String str, bool allowHyphen) + { + if (String.IsNullOrEmpty(str)) + { + return false; + } + + Char first = str[0]; + if ((first >= 'a' && first <= 'z') || + (first >= 'A' && first <= 'Z') || + first == '_') + { + for (Int32 i = 1 ; i < str.Length ; i++) + { + Char c = str[i]; + if ((c >= 'a' && c <= 'z') || + (c >= 'A' && c <= 'Z') || + (c >= '0' && c <= '9') || + c == '_' || (allowHyphen && c == '-')) + { + // OK + } + else + { + return false; + } + } + + return true; + } + else + { + return false; + } + } + + private static Boolean TestWhitespaceOrPunctuation(Char c) + { + switch (c) + { + case ExpressionConstants.StartIndex: + case ExpressionConstants.StartParameter: + case ExpressionConstants.EndIndex: + case ExpressionConstants.EndParameter: + case ExpressionConstants.Separator: + case ExpressionConstants.Dereference: + return true; + default: + return char.IsWhiteSpace(c); + } + } + + private readonly String m_expression; // Raw expression string. + private readonly HashSet m_extensionFunctions; + private readonly HashSet m_extensionNamedValues; + private Int32 m_index; // Index of raw condition string. 
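        // A minimal usage sketch of this analyzer (the calling parser is not shown in this
        // file; the expression string and registered names below are assumptions for
        // illustration only):
        //
        //   var lexer = new LexicalAnalyzer(
        //       "startsWith(github.ref, 'refs/tags/')",
        //       namedValues: new[] { "github" },
        //       functions: new String[0],
        //       allowKeywordHyphens: false);
        //   Token token = null;
        //   while (lexer.TryGetNextToken(ref token))
        //   {
        //       // token.Kind, token.RawValue, and token.Index describe each lexeme in order.
        //   }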
+ private Token m_lastToken; + private readonly Boolean m_allowKeyHyphens; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/LiteralValueNode.cs b/src/Sdk/DTExpressions/Expressions/LiteralValueNode.cs new file mode 100644 index 00000000000..2639cdbcccc --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/LiteralValueNode.cs @@ -0,0 +1,47 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class LiteralValueNode : ExpressionNode + { + public LiteralValueNode(Object val) + { + ValueKind kind; + + // Note, it is OK to pass null EvaluationOptions here since the parser does not support + // localized values. For example, if parsing local date-times were supported, then we + // would need to know the account's time zone at parse time. This is an OK limitation, + // since we can defer this type of problem to runtime, for example by adding a parseDate function. + Value = ExpressionUtil.ConvertToCanonicalValue(null, val, out kind, out _, out _); + + Kind = kind; + Name = kind.ToString(); + } + + public ValueKind Kind { get; } + + public Object Value { get; } + + // Prevent the value from being stored on the evaluation context. + // This avoids unneccessarily duplicating the value in memory. + protected sealed override Boolean TraceFullyRealized => false; + + internal sealed override String ConvertToExpression() + { + return ExpressionUtil.FormatValue(null, Value, Kind); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + return ExpressionUtil.FormatValue(null, Value, Kind); + } + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + return Value; + } + } + +} diff --git a/src/Sdk/DTExpressions/Expressions/MemoryCounter.cs b/src/Sdk/DTExpressions/Expressions/MemoryCounter.cs new file mode 100644 index 00000000000..305b8b85667 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/MemoryCounter.cs @@ -0,0 +1,166 @@ +using System; +using System.ComponentModel; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Expressions +{ + /// + /// Helper class for ExpressionNode authors. This class helps calculate memory overhead for a result object. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class MemoryCounter + { + internal MemoryCounter( + ExpressionNode node, + Int32? maxBytes) + { + m_node = node; + m_maxBytes = (maxBytes ?? 0) > 0 ? 
maxBytes.Value : Int32.MaxValue; + } + + public Int32 CurrentBytes => m_currentBytes; + + public void Add(Int32 amount) + { + if (!TryAdd(amount)) + { + throw new InvalidOperationException(ExpressionResources.ExceededAllowedMemory(m_node?.ConvertToExpression())); + } + } + + public void Add(String value) + { + Add(CalculateSize(value)); + } + + public void Add( + JToken value, + Boolean traverse) + { + // This measurement doesn't have to be perfect + // https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/ + + if (value is null) + { + Add(MinObjectSize); + } + + if (!traverse) + { + switch (value.Type) + { + case JTokenType.Bytes: + case JTokenType.String: + case JTokenType.Uri: + Add(value.ToObject()); + return; + + case JTokenType.Property: + var property = value as JProperty; + Add(property.Name); + return; + + default: + Add(MinObjectSize); + return; + } + } + + do + { + // Descend as much as possible + while (true) + { + // Add bytes + Add(value, false); + + // Descend + if (value.HasValues) + { + value = value.First; + } + // No more descendants + else + { + break; + } + } + + // Next sibling or ancestor sibling + do + { + var sibling = value.Next; + + // Sibling found + if (sibling != null) + { + value = sibling; + break; + } + + // Ascend + value = value.Parent; + + } while (value != null); + + } while (value != null); + } + + public void AddMinObjectSize() + { + Add(MinObjectSize); + } + + public void Remove(String value) + { + m_currentBytes -= CalculateSize(value); + } + + public static Int32 CalculateSize(String value) + { + // This measurement doesn't have to be perfect. + // https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/ + + Int32 bytes; + checked + { + bytes = StringBaseOverhead + ((value?.Length ?? 0) * 2); + } + return bytes; + } + + internal Boolean TryAdd(Int32 amount) + { + try + { + checked + { + amount += m_currentBytes; + } + + if (amount > m_maxBytes) + { + return false; + } + + m_currentBytes = amount; + return true; + } + catch (OverflowException) + { + return false; + } + } + + internal Boolean TryAdd(String value) + { + return TryAdd(CalculateSize(value)); + } + + internal const Int32 MinObjectSize = 24; + internal const Int32 StringBaseOverhead = 26; + private readonly Int32 m_maxBytes; + private readonly ExpressionNode m_node; + private Int32 m_currentBytes; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/NamedValueInfo.cs b/src/Sdk/DTExpressions/Expressions/NamedValueInfo.cs new file mode 100644 index 00000000000..1c74844386d --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/NamedValueInfo.cs @@ -0,0 +1,20 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + public class NamedValueInfo : INamedValueInfo + where T : NamedValueNode, new() + { + public NamedValueInfo(String name) + { + Name = name; + } + + public String Name { get; } + + public NamedValueNode CreateNode() + { + return new T(); + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/NamedValueNode.cs b/src/Sdk/DTExpressions/Expressions/NamedValueNode.cs new file mode 100644 index 00000000000..9dfab2a88e5 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/NamedValueNode.cs @@ -0,0 +1,24 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class NamedValueNode : ExpressionNode + { + internal sealed override string ConvertToExpression() => Name; + + protected sealed override Boolean TraceFullyRealized => true; + + internal sealed 
override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return Name; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/NotEqualNode.cs b/src/Sdk/DTExpressions/Expressions/NotEqualNode.cs new file mode 100644 index 00000000000..4a54e621506 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/NotEqualNode.cs @@ -0,0 +1,14 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class NotEqualNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + return !Parameters[0].Evaluate(context).Equals(context, Parameters[1].Evaluate(context)); + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/NotInNode.cs b/src/Sdk/DTExpressions/Expressions/NotInNode.cs new file mode 100644 index 00000000000..c7b1a5a08fe --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/NotInNode.cs @@ -0,0 +1,24 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class NotInNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + EvaluationResult left = Parameters[0].Evaluate(context); + for (Int32 i = 1; i < Parameters.Count; i++) + { + EvaluationResult right = Parameters[i].Evaluate(context); + if (left.Equals(context, right)) + { + return false; + } + } + + return true; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/NotNode.cs b/src/Sdk/DTExpressions/Expressions/NotNode.cs new file mode 100644 index 00000000000..b27ef2fc42b --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/NotNode.cs @@ -0,0 +1,14 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class NotNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + return !Parameters[0].EvaluateBoolean(context); + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/OrNode.cs b/src/Sdk/DTExpressions/Expressions/OrNode.cs new file mode 100644 index 00000000000..513a2e5e5c8 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/OrNode.cs @@ -0,0 +1,22 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class OrNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + foreach (ExpressionNode parameter in Parameters) + { + if (parameter.EvaluateBoolean(context)) + { + return true; + } + } + + return false; + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ParseException.cs b/src/Sdk/DTExpressions/Expressions/ParseException.cs new file mode 100644 index 00000000000..489c0d87399 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ParseException.cs @@ -0,0 +1,65 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + public sealed class ParseException : ExpressionException + { + internal ParseException(ParseExceptionKind kind, Token token, String expression) + : base(secretMasker: null, message: String.Empty) + { + Expression = expression; + Kind = kind; + RawToken = token?.RawValue; + TokenIndex = token?.Index ?? 
0; + String description; + switch (kind) + { + case ParseExceptionKind.ExceededMaxDepth: + description = ExpressionResources.ExceededMaxExpressionDepth(ExpressionConstants.MaxDepth); + break; + case ParseExceptionKind.ExceededMaxLength: + description = ExpressionResources.ExceededMaxExpressionLength(ExpressionConstants.MaxLength); + break; + case ParseExceptionKind.ExpectedPropertyName: + description = ExpressionResources.ExpectedPropertyName(); + break; + case ParseExceptionKind.ExpectedStartParameter: + description = ExpressionResources.ExpectedStartParameter(); + break; + case ParseExceptionKind.UnclosedFunction: + description = ExpressionResources.UnclosedFunction(); + break; + case ParseExceptionKind.UnclosedIndexer: + description = ExpressionResources.UnclosedIndexer(); + break; + case ParseExceptionKind.UnexpectedSymbol: + description = ExpressionResources.UnexpectedSymbol(); + break; + case ParseExceptionKind.UnrecognizedValue: + description = ExpressionResources.UnrecognizedValue(); + break; + default: // Should never reach here. + throw new Exception($"Unexpected parse exception kind '{kind}'."); + } + + if (token == null) + { + Message = ExpressionResources.ParseErrorWithFwlink(description); + } + else + { + Message = ExpressionResources.ParseErrorWithTokenInfo(description, RawToken, TokenIndex + 1, Expression); + } + } + + internal String Expression { get; } + + internal ParseExceptionKind Kind { get; } + + internal String RawToken { get; } + + internal Int32 TokenIndex { get; } + + public sealed override String Message { get; } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ParseExceptionKind.cs b/src/Sdk/DTExpressions/Expressions/ParseExceptionKind.cs new file mode 100644 index 00000000000..cddebfed209 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ParseExceptionKind.cs @@ -0,0 +1,14 @@ +namespace GitHub.DistributedTask.Expressions +{ + internal enum ParseExceptionKind + { + ExceededMaxDepth, + ExceededMaxLength, + ExpectedPropertyName, + ExpectedStartParameter, + UnclosedFunction, + UnclosedIndexer, + UnexpectedSymbol, + UnrecognizedValue, + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ResultMemory.cs b/src/Sdk/DTExpressions/Expressions/ResultMemory.cs new file mode 100644 index 00000000000..762980b1182 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ResultMemory.cs @@ -0,0 +1,58 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class ResultMemory + { + /// + /// Only set a non-null value when both of the following conditions are met: + /// 1) The result is a complex object. In other words, the result is + /// not a simple type: string, boolean, number, version, datetime, or null. + /// 2) The result is a newly created object. + /// + /// + /// For example, consider a function jsonParse() which takes a string parameter, + /// and returns a JToken object. The JToken object is newly created and a rough + /// measurement should be returned for the number of bytes it consumes in memory. + /// + /// + /// + /// For another example, consider a function which returns a sub-object from a + /// complex parameter value. From the perspective of an individual function, + /// the size of the complex parameter value is unknown. In this situation, set the + /// value to IntPtr.Size. + /// + /// + /// + /// When you are unsure, set the value to null. Null indicates the overhead of a + /// new pointer should be accounted for. + /// + /// + public Int32? 
Bytes { get; set; } + + /// + /// Indicates whether represents the total size of the result. + /// True indicates the accounting-overhead of downstream parameters can be discarded. + /// + /// For , this value is currently ignored. + /// + /// + /// For example, consider a funciton jsonParse() which takes a string paramter, + /// and returns a JToken object. The JToken object is newly created and a rough + /// measurement should be returned for the amount of bytes it consumes in memory. + /// Set the to true, since new object contains no references + /// to previously allocated memory. + /// + /// + /// + /// For another example, consider a function which wraps a complex parameter result. + /// should be set to the amount of newly allocated memory. + /// However since the object references previously allocated memory, set + /// to false. + /// + /// + public Boolean IsTotal { get; set; } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/StartsWithNode.cs b/src/Sdk/DTExpressions/Expressions/StartsWithNode.cs new file mode 100644 index 00000000000..9c5e04bb292 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/StartsWithNode.cs @@ -0,0 +1,16 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class StartsWithNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + String left = Parameters[0].EvaluateString(context) ?? String.Empty; + String right = Parameters[1].EvaluateString(context) ?? String.Empty; + return left.StartsWith(right, StringComparison.OrdinalIgnoreCase); + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/Token.cs b/src/Sdk/DTExpressions/Expressions/Token.cs new file mode 100644 index 00000000000..c1788a04d12 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/Token.cs @@ -0,0 +1,28 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class Token + { + public Token(TokenKind kind, Char rawValue, Int32 index, Object parsedValue = null) + : this(kind, rawValue.ToString(), index, parsedValue) + { + } + + public Token(TokenKind kind, String rawValue, Int32 index, Object parsedValue = null) + { + Kind = kind; + RawValue = rawValue; + Index = index; + ParsedValue = parsedValue; + } + + public TokenKind Kind { get; } + + public String RawValue { get; } + + public Int32 Index { get; } + + public Object ParsedValue { get; } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/TokenKind.cs b/src/Sdk/DTExpressions/Expressions/TokenKind.cs new file mode 100644 index 00000000000..97cbf20d473 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/TokenKind.cs @@ -0,0 +1,29 @@ +namespace GitHub.DistributedTask.Expressions +{ + internal enum TokenKind + { + // Punctuation + StartIndex, + StartParameter, + EndIndex, + EndParameter, + Separator, + Dereference, + Wildcard, + + // Values + Boolean, + Number, + Version, + String, + PropertyName, + + // Functions and named-values + WellKnownFunction, + ExtensionFunction, + ExtensionNamedValue, + UnknownKeyword, + + Unrecognized, + } +} diff --git a/src/Sdk/DTExpressions/Expressions/TypeCastException.cs b/src/Sdk/DTExpressions/Expressions/TypeCastException.cs new file mode 100644 index 00000000000..59ad6afaacb --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/TypeCastException.cs @@ -0,0 +1,67 @@ +using System; +using GitHub.DistributedTask.Logging; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class TypeCastException : 
ExpressionException + { + internal TypeCastException(Type fromType, Type toType) + : base(null, String.Empty) + { + FromType = fromType; + ToType = toType; + m_message = ExpressionResources.TypeCastErrorNoValue(fromType.Name, toType.Name); + } + + internal TypeCastException(ISecretMasker secretMasker, Object value, ValueKind fromKind, ValueKind toKind) + : base(null, String.Empty) + { + Value = value; + FromKind = fromKind; + ToKind = toKind; + m_message = ExpressionResources.TypeCastError( + fromKind, // from kind + toKind, // to kind + ExpressionUtil.FormatValue(secretMasker, value, fromKind)); // value + } + + internal TypeCastException(ISecretMasker secretMasker, Object value, ValueKind fromKind, Type toType) + : base(null, String.Empty) + { + Value = value; + FromKind = fromKind; + ToType = toType; + m_message = ExpressionResources.TypeCastError( + fromKind, // from kind + toType, // to type + ExpressionUtil.FormatValue(secretMasker, value, fromKind)); // value + } + + internal TypeCastException(ISecretMasker secretMasker, Object value, ValueKind fromKind, Type toType, String error) + : base(null, String.Empty) + { + Value = value; + FromKind = fromKind; + ToType = toType; + m_message = ExpressionResources.TypeCastErrorWithError( + fromKind, // from kind + toType, // to type + ExpressionUtil.FormatValue(secretMasker, value, fromKind), // value + secretMasker != null ? secretMasker.MaskSecrets(error) : error); // error + } + + public override String Message => m_message; + + internal Object Value { get; } + + internal ValueKind? FromKind { get; } + + internal Type FromType { get; } + + internal ValueKind? ToKind { get; } + + internal Type ToType { get; } + + private readonly String m_message; + } +} diff --git a/src/Sdk/DTExpressions/Expressions/UnknownFunctionNode.cs b/src/Sdk/DTExpressions/Expressions/UnknownFunctionNode.cs new file mode 100644 index 00000000000..6f22f187407 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/UnknownFunctionNode.cs @@ -0,0 +1,13 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class UnknownFunctionNode : FunctionNode + { + protected sealed override Object EvaluateCore(EvaluationContext context) + { + // Should never reach here. + throw new NotSupportedException("Unknown function node is not supported during evaluation."); + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/UnknownNamedValueNode.cs b/src/Sdk/DTExpressions/Expressions/UnknownNamedValueNode.cs new file mode 100644 index 00000000000..a6dedc6434e --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/UnknownNamedValueNode.cs @@ -0,0 +1,13 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class UnknownNamedValueNode : NamedValueNode + { + protected sealed override object EvaluateCore(EvaluationContext evaluationContext) + { + // Should never reach here. 
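            // An unknown named-value node is presumably only produced when parsing tolerates
            // unrecognized keywords (for example, during validation); by evaluation time the
            // unknown node is expected to have been rejected or replaced, so evaluation
            // simply fails fast here.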
+ throw new NotSupportedException("Unknown function node is not supported during evaluation."); + } + } +} diff --git a/src/Sdk/DTExpressions/Expressions/ValueKind.cs b/src/Sdk/DTExpressions/Expressions/ValueKind.cs new file mode 100644 index 00000000000..ea3dc847749 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/ValueKind.cs @@ -0,0 +1,17 @@ +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public enum ValueKind + { + Array, + Boolean, + DateTime, + Null, + Number, + Object, + String, + Version, + } +} diff --git a/src/Sdk/DTExpressions/Expressions/XOrNode.cs b/src/Sdk/DTExpressions/Expressions/XOrNode.cs new file mode 100644 index 00000000000..e00837482e5 --- /dev/null +++ b/src/Sdk/DTExpressions/Expressions/XOrNode.cs @@ -0,0 +1,14 @@ +using System; + +namespace GitHub.DistributedTask.Expressions +{ + internal sealed class XorNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + return Parameters[0].EvaluateBoolean(context) ^ Parameters[1].EvaluateBoolean(context); + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/EvaluationOptions.cs b/src/Sdk/DTExpressions2/Expressions2/EvaluationOptions.cs new file mode 100644 index 00000000000..372d7a0cf61 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/EvaluationOptions.cs @@ -0,0 +1,23 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2 +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class EvaluationOptions + { + public EvaluationOptions() + { + } + + public EvaluationOptions(EvaluationOptions copy) + { + if (copy != null) + { + MaxMemory = copy.MaxMemory; + } + } + + public Int32 MaxMemory { get; set; } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/EvaluationResult.cs b/src/Sdk/DTExpressions2/Expressions2/EvaluationResult.cs new file mode 100644 index 00000000000..354a76d8e37 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/EvaluationResult.cs @@ -0,0 +1,453 @@ +using System; +using System.ComponentModel; +using System.Globalization; +using GitHub.DistributedTask.Expressions2.Sdk; + +namespace GitHub.DistributedTask.Expressions2 +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class EvaluationResult + { + internal EvaluationResult( + EvaluationContext context, + Int32 level, + Object val, + ValueKind kind, + Object raw) + : this(context, level, val, kind, raw, false) + { + } + + internal EvaluationResult( + EvaluationContext context, + Int32 level, + Object val, + ValueKind kind, + Object raw, + Boolean omitTracing) + { + m_level = level; + Value = val; + Kind = kind; + Raw = raw; + m_omitTracing = omitTracing; + + if (!omitTracing) + { + TraceValue(context); + } + } + + public ValueKind Kind { get; } + + /// + /// When an interface converter is applied to the node result, raw contains the original value + /// + public Object Raw { get; } + + public Object Value { get; } + + public Boolean IsFalsy + { + get + { + switch (Kind) + { + case ValueKind.Null: + return true; + case ValueKind.Boolean: + var boolean = (Boolean)Value; + return !boolean; + case ValueKind.Number: + var number = (Double)Value; + return number == 0d || Double.IsNaN(number); + case ValueKind.String: + var str = (String)Value; + return String.Equals(str, String.Empty, StringComparison.Ordinal); + default: + return false; + } + } + } + + public Boolean 
IsPrimitive => ExpressionUtility.IsPrimitive(Kind); + + public Boolean IsTruthy => !IsFalsy; + + /// + /// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3. + /// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives. + /// + public Boolean AbstractEqual(EvaluationResult right) + { + return AbstractEqual(Value, right.Value); + } + + /// + /// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3. + /// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives. + /// + public Boolean AbstractGreaterThan(EvaluationResult right) + { + return AbstractGreaterThan(Value, right.Value); + } + + /// + /// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3. + /// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives. + /// + public Boolean AbstractGreaterThanOrEqual(EvaluationResult right) + { + return AbstractEqual(Value, right.Value) || AbstractGreaterThan(Value, right.Value); + } + + /// + /// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3. + /// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives. + /// + public Boolean AbstractLessThan(EvaluationResult right) + { + return AbstractLessThan(Value, right.Value); + } + + /// + /// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3. + /// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives. + /// + public Boolean AbstractLessThanOrEqual(EvaluationResult right) + { + return AbstractEqual(Value, right.Value) || AbstractLessThan(Value, right.Value); + } + + /// + /// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3. + /// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives. + /// + public Boolean AbstractNotEqual(EvaluationResult right) + { + return !AbstractEqual(Value, right.Value); + } + + public Double ConvertToNumber() + { + return ConvertToNumber(Value); + } + + public String ConvertToString() + { + switch (Kind) + { + case ValueKind.Null: + return String.Empty; + + case ValueKind.Boolean: + return ((Boolean)Value) ? ExpressionConstants.True : ExpressionConstants.False; + + case ValueKind.Number: + return ((Double)Value).ToString(ExpressionConstants.NumberFormat, CultureInfo.InvariantCulture); + + case ValueKind.String: + return Value as String; + + default: + return Kind.ToString(); + } + } + + public Boolean TryGetCollectionInterface(out Object collection) + { + if ((Kind == ValueKind.Object || Kind == ValueKind.Array)) + { + var obj = Value; + if (obj is IReadOnlyObject) + { + collection = obj; + return true; + } + else if (obj is IReadOnlyArray) + { + collection = obj; + return true; + } + } + + collection = null; + return false; + } + + /// + /// Useful for working with values that are not the direct evaluation result of a parameter. + /// This allows ExpressionNode authors to leverage the coercion and comparision functions + /// for any values. + /// + /// Also note, the value will be canonicalized (for example numeric types converted to double) and any + /// matching interfaces applied. 
+ /// + public static EvaluationResult CreateIntermediateResult( + EvaluationContext context, + Object obj) + { + var val = ExpressionUtility.ConvertToCanonicalValue(obj, out ValueKind kind, out Object raw); + return new EvaluationResult(context, 0, val, kind, raw, omitTracing: true); + } + + private void TraceValue(EvaluationContext context) + { + if (!m_omitTracing) + { + TraceValue(context, Value, Kind); + } + } + + private void TraceValue( + EvaluationContext context, + Object val, + ValueKind kind) + { + if (!m_omitTracing) + { + TraceVerbose(context, String.Concat("=> ", ExpressionUtility.FormatValue(context?.SecretMasker, val, kind))); + } + } + + private void TraceVerbose( + EvaluationContext context, + String message) + { + if (!m_omitTracing) + { + context?.Trace.Verbose(String.Empty.PadLeft(m_level * 2, '.') + (message ?? String.Empty)); + } + } + + /// + /// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3. + /// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives. + /// + private static Boolean AbstractEqual( + Object canonicalLeftValue, + Object canonicalRightValue) + { + CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out var leftKind, out var rightKind); + + // Same kind + if (leftKind == rightKind) + { + switch (leftKind) + { + // Null, Null + case ValueKind.Null: + return true; + + // Number, Number + case ValueKind.Number: + var leftDouble = (Double)canonicalLeftValue; + var rightDouble = (Double)canonicalRightValue; + if (Double.IsNaN(leftDouble) || Double.IsNaN(rightDouble)) + { + return false; + } + return leftDouble == rightDouble; + + // String, String + case ValueKind.String: + var leftString = (String)canonicalLeftValue; + var rightString = (String)canonicalRightValue; + return String.Equals(leftString, rightString, StringComparison.OrdinalIgnoreCase); + + // Boolean, Boolean + case ValueKind.Boolean: + var leftBoolean = (Boolean)canonicalLeftValue; + var rightBoolean = (Boolean)canonicalRightValue; + return leftBoolean == rightBoolean; + + // Object, Object + case ValueKind.Object: + case ValueKind.Array: + return Object.ReferenceEquals(canonicalLeftValue, canonicalRightValue); + } + } + + return false; + } + + /// + /// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3. + /// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives. 
+ /// + private static Boolean AbstractGreaterThan( + Object canonicalLeftValue, + Object canonicalRightValue) + { + CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out var leftKind, out var rightKind); + + // Same kind + if (leftKind == rightKind) + { + switch (leftKind) + { + // Number, Number + case ValueKind.Number: + var leftDouble = (Double)canonicalLeftValue; + var rightDouble = (Double)canonicalRightValue; + if (Double.IsNaN(leftDouble) || Double.IsNaN(rightDouble)) + { + return false; + } + return leftDouble > rightDouble; + + // String, String + case ValueKind.String: + var leftString = (String)canonicalLeftValue; + var rightString = (String)canonicalRightValue; + return String.Compare(leftString, rightString, StringComparison.OrdinalIgnoreCase) > 0; + + // Boolean, Boolean + case ValueKind.Boolean: + var leftBoolean = (Boolean)canonicalLeftValue; + var rightBoolean = (Boolean)canonicalRightValue; + return leftBoolean && !rightBoolean; + } + } + + return false; + } + + /// + /// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3. + /// Except string comparison is OrdinalIgnoreCase, and objects are not coerced to primitives. + /// + private static Boolean AbstractLessThan( + Object canonicalLeftValue, + Object canonicalRightValue) + { + CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out var leftKind, out var rightKind); + + // Same kind + if (leftKind == rightKind) + { + switch (leftKind) + { + // Number, Number + case ValueKind.Number: + var leftDouble = (Double)canonicalLeftValue; + var rightDouble = (Double)canonicalRightValue; + if (Double.IsNaN(leftDouble) || Double.IsNaN(rightDouble)) + { + return false; + } + return leftDouble < rightDouble; + + // String, String + case ValueKind.String: + var leftString = (String)canonicalLeftValue; + var rightString = (String)canonicalRightValue; + return String.Compare(leftString, rightString, StringComparison.OrdinalIgnoreCase) < 0; + + // Boolean, Boolean + case ValueKind.Boolean: + var leftBoolean = (Boolean)canonicalLeftValue; + var rightBoolean = (Boolean)canonicalRightValue; + return !leftBoolean && rightBoolean; + } + } + + return false; + } + + /// Similar to the Javascript abstract equality comparison algorithm http://www.ecma-international.org/ecma-262/5.1/#sec-11.9.3. + /// Except objects are not coerced to primitives. 
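+ /// For example (illustrative): comparing null with '' coerces both sides to the number 0; comparing '0x10' with the number 16 coerces the string to the number 16; comparing 'abc' with 0 coerces the string to NaN, so the comparison returns false.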
+ private static void CoerceTypes( + ref Object canonicalLeftValue, + ref Object canonicalRightValue, + out ValueKind leftKind, + out ValueKind rightKind) + { + leftKind = GetKind(canonicalLeftValue); + rightKind = GetKind(canonicalRightValue); + + // Same kind + if (leftKind == rightKind) + { + } + // Number, String + else if (leftKind == ValueKind.Number && rightKind == ValueKind.String) + { + canonicalRightValue = ConvertToNumber(canonicalRightValue); + rightKind = ValueKind.Number; + } + // String, Number + else if (leftKind == ValueKind.String && rightKind == ValueKind.Number) + { + canonicalLeftValue = ConvertToNumber(canonicalLeftValue); + leftKind = ValueKind.Number; + } + // Boolean|Null, Any + else if (leftKind == ValueKind.Boolean || leftKind == ValueKind.Null) + { + canonicalLeftValue = ConvertToNumber(canonicalLeftValue); + CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out leftKind, out rightKind); + } + // Any, Boolean|Null + else if (rightKind == ValueKind.Boolean || rightKind == ValueKind.Null) + { + canonicalRightValue = ConvertToNumber(canonicalRightValue); + CoerceTypes(ref canonicalLeftValue, ref canonicalRightValue, out leftKind, out rightKind); + } + } + + /// + /// For primitives, follows the Javascript rules (the Number function in Javascript). Otherwise NaN. + /// + private static Double ConvertToNumber(Object canonicalValue) + { + var kind = GetKind(canonicalValue); + switch (kind) + { + case ValueKind.Null: + return 0d; + case ValueKind.Boolean: + return (Boolean)canonicalValue ? 1d : 0d; + case ValueKind.Number: + return (Double)canonicalValue; + case ValueKind.String: + return ExpressionUtility.ParseNumber(canonicalValue as String); + } + + return Double.NaN; + } + + private static ValueKind GetKind(Object canonicalValue) + { + if (Object.ReferenceEquals(canonicalValue, null)) + { + return ValueKind.Null; + } + else if (canonicalValue is Boolean) + { + return ValueKind.Boolean; + } + else if (canonicalValue is Double) + { + return ValueKind.Number; + } + else if (canonicalValue is String) + { + return ValueKind.String; + } + else if (canonicalValue is IReadOnlyObject) + { + return ValueKind.Object; + } + else if (canonicalValue is IReadOnlyArray) + { + return ValueKind.Array; + } + + return ValueKind.Object; + } + + private readonly Int32 m_level; + private readonly Boolean m_omitTracing; + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/ExpressionConstants.cs b/src/Sdk/DTExpressions2/Expressions2/ExpressionConstants.cs new file mode 100644 index 00000000000..5a26d50e459 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/ExpressionConstants.cs @@ -0,0 +1,60 @@ +using System; +using System.Collections.Generic; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.DistributedTask.Expressions2.Sdk.Functions; + +namespace GitHub.DistributedTask.Expressions2 +{ + internal static class ExpressionConstants + { + static ExpressionConstants() + { + AddFunction("contains", 2, 2); + AddFunction("endsWith", 2, 2); + AddFunction("format", 1, Byte.MaxValue); + AddFunction("join", 1, 2); + AddFunction("startsWith", 2, 2); + AddFunction("toJson", 1, 1); + AddFunction("hashFiles", 1, 1); + } + + private static void AddFunction(String name, Int32 minParameters, Int32 maxParameters) + where T : Function, new() + { + WellKnownFunctions.Add(name, new FunctionInfo(name, minParameters, maxParameters)); + } + + internal static readonly String False = "false"; + internal static readonly String Infinity = "Infinity"; + internal static readonly Int32 
MaxDepth = 50; + internal static readonly Int32 MaxLength = 21000; // Under 85,000 large object heap threshold, even if .NET switches to UTF-32 + internal static readonly String NaN = "NaN"; + internal static readonly String NegativeInfinity = "-Infinity"; + internal static readonly String Null = "null"; + internal static readonly String NumberFormat = "G15"; + internal static readonly String True = "true"; + internal static readonly Dictionary<String, IFunctionInfo> WellKnownFunctions = new Dictionary<String, IFunctionInfo>(StringComparer.OrdinalIgnoreCase); + + // Punctuation + internal const Char StartGroup = '('; // logical grouping + internal const Char StartIndex = '['; + internal const Char StartParameter = '('; // function call + internal const Char EndGroup = ')'; // logical grouping + internal const Char EndIndex = ']'; + internal const Char EndParameter = ')'; // function call + internal const Char Separator = ','; + internal const Char Dereference = '.'; + internal const Char Wildcard = '*'; + + // Operators + internal const String Not = "!"; + internal const String NotEqual = "!="; + internal const String GreaterThan = ">"; + internal const String GreaterThanOrEqual = ">="; + internal const String LessThan = "<"; + internal const String LessThanOrEqual = "<="; + internal const String Equal = "=="; + internal const String And = "&&"; + internal const String Or = "||"; + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/ExpressionException.cs b/src/Sdk/DTExpressions2/Expressions2/ExpressionException.cs new file mode 100644 index 00000000000..c50a015437c --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/ExpressionException.cs @@ -0,0 +1,24 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Logging; + +namespace GitHub.DistributedTask.Expressions2 +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class ExpressionException : Exception + { + internal ExpressionException(ISecretMasker secretMasker, String message) + { + if (secretMasker != null) + { + message = secretMasker.MaskSecrets(message); + } + + m_message = message; + } + + public override String Message => m_message; + + private readonly String m_message; + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/ExpressionParser.cs b/src/Sdk/DTExpressions2/Expressions2/ExpressionParser.cs new file mode 100644 index 00000000000..17638deda91 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/ExpressionParser.cs @@ -0,0 +1,471 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.Expressions2.Sdk.Operators; +using GitHub.DistributedTask.Expressions2.Tokens; + +namespace GitHub.DistributedTask.Expressions2 +{ + using GitHub.DistributedTask.Expressions2.Sdk; + using GitHub.DistributedTask.Expressions2.Sdk.Functions; + + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ExpressionParser + { + public IExpressionNode CreateTree( + String expression, + ITraceWriter trace, + IEnumerable<INamedValueInfo> namedValues, + IEnumerable<IFunctionInfo> functions) + { + var context = new ParseContext(expression, trace, namedValues, functions); + context.Trace.Info($"Parsing expression: <{expression}>"); + return CreateTree(context); + } + + public IExpressionNode ValidateSyntax( + String expression, + ITraceWriter trace) + { + var context = new ParseContext(expression, trace, namedValues: null, functions: null, allowUnknownKeywords: true); + context.Trace.Info($"Validating expression syntax: <{expression}>"); + return CreateTree(context); + } + + private static
IExpressionNode CreateTree(ParseContext context) + { + // Push the tokens + while (context.LexicalAnalyzer.TryGetNextToken(ref context.Token)) + { + // Unexpected + if (context.Token.Kind == TokenKind.Unexpected) + { + throw new ParseException(ParseExceptionKind.UnexpectedSymbol, context.Token, context.Expression); + } + // Operator + else if (context.Token.IsOperator) + { + PushOperator(context); + } + // Operand + else + { + PushOperand(context); + } + + context.LastToken = context.Token; + } + + // No tokens + if (context.LastToken == null) + { + return null; + } + + // Check unexpected end of expression + if (context.Operators.Count > 0) + { + var unexpectedLastToken = false; + switch (context.LastToken.Kind) + { + case TokenKind.EndGroup: // ")" logical grouping + case TokenKind.EndIndex: // "]" + case TokenKind.EndParameters: // ")" function call + // Legal + break; + case TokenKind.Function: + // Illegal + unexpectedLastToken = true; + break; + default: + unexpectedLastToken = context.LastToken.IsOperator; + break; + } + + if (unexpectedLastToken || context.LexicalAnalyzer.UnclosedTokens.Any()) + { + throw new ParseException(ParseExceptionKind.UnexpectedEndOfExpression, context.LastToken, context.Expression); + } + } + + // Flush operators + while (context.Operators.Count > 0) + { + FlushTopOperator(context); + } + + // Check max depth + var result = context.Operands.Single(); + CheckMaxDepth(context, result); + + return result; + } + + private static void PushOperand(ParseContext context) + { + // Create the node + var node = default(ExpressionNode); + switch (context.Token.Kind) + { + // Function + case TokenKind.Function: + var function = context.Token.RawValue; + if (TryGetFunctionInfo(context, function, out var functionInfo)) + { + node = functionInfo.CreateNode(); + node.Name = function; + } + else if (context.AllowUnknownKeywords) + { + node = new NoOperation(); + node.Name = function; + } + else + { + throw new ParseException(ParseExceptionKind.UnrecognizedFunction, context.Token, context.Expression); + } + break; + + // Named-value + case TokenKind.NamedValue: + var name = context.Token.RawValue; + if (context.ExtensionNamedValues.TryGetValue(name, out var namedValueInfo)) + { + node = namedValueInfo.CreateNode(); + node.Name = name; + + } + else if (context.AllowUnknownKeywords) + { + node = new NoOperationNamedValue(); + node.Name = name; + } + else + { + throw new ParseException(ParseExceptionKind.UnrecognizedNamedValue, context.Token, context.Expression); + } + break; + + // Otherwise simple + default: + node = context.Token.ToNode(); + break; + } + + // Push the operand + context.Operands.Push(node); + } + + private static void PushOperator(ParseContext context) + { + // Flush higher or equal precedence + if (context.Token.Associativity == Associativity.LeftToRight) + { + var precedence = context.Token.Precedence; + while (context.Operators.Count > 0) + { + var topOperator = context.Operators.Peek(); + if (precedence <= topOperator.Precedence && + topOperator.Kind != TokenKind.StartGroup && // Unless top is "(" logical grouping + topOperator.Kind != TokenKind.StartIndex && // or unless top is "[" + topOperator.Kind != TokenKind.StartParameters &&// or unless top is "(" function call + topOperator.Kind != TokenKind.Separator) // or unless top is "," + { + FlushTopOperator(context); + continue; + } + + break; + } + } + + // Push the operator + context.Operators.Push(context.Token); + + // Process closing operators now, since context.LastToken is required + // to 
accurately process TokenKind.EndParameters + switch (context.Token.Kind) + { + case TokenKind.EndGroup: // ")" logical grouping + case TokenKind.EndIndex: // "]" + case TokenKind.EndParameters: // ")" function call + FlushTopOperator(context); + break; + } + } + + private static void FlushTopOperator(ParseContext context) + { + // Special handling for closing operators + switch (context.Operators.Peek().Kind) + { + case TokenKind.EndIndex: // "]" + FlushTopEndIndex(context); + return; + + case TokenKind.EndGroup: // ")" logical grouping + FlushTopEndGroup(context); + return; + + case TokenKind.EndParameters: // ")" function call + FlushTopEndParameters(context); + return; + } + + // Pop the operator + var @operator = context.Operators.Pop(); + + // Create the node + var node = (Container)@operator.ToNode(); + + // Pop the operands, add to the node + var operands = PopOperands(context, @operator.OperandCount); + foreach (var operand in operands) + { + // Flatten nested And + if (node is And) + { + if (operand is And nestedAnd) + { + foreach (var nestedParameter in nestedAnd.Parameters) + { + node.AddParameter(nestedParameter); + } + + continue; + } + } + // Flatten nested Or + else if (node is Or) + { + if (operand is Or nestedOr) + { + foreach (var nestedParameter in nestedOr.Parameters) + { + node.AddParameter(nestedParameter); + } + + continue; + } + } + + node.AddParameter(operand); + } + + // Push the node to the operand stack + context.Operands.Push(node); + } + + /// + /// Flushes the ")" logical grouping operator + /// + private static void FlushTopEndGroup(ParseContext context) + { + // Pop the operators + PopOperator(context, TokenKind.EndGroup); // ")" logical grouping + PopOperator(context, TokenKind.StartGroup); // "(" logical grouping + } + + /// + /// Flushes the "]" operator + /// + private static void FlushTopEndIndex(ParseContext context) + { + // Pop the operators + PopOperator(context, TokenKind.EndIndex); // "]" + var @operator = PopOperator(context, TokenKind.StartIndex); // "[" + + // Create the node + var node = (Container)@operator.ToNode(); + + // Pop the operands, add to the node + var operands = PopOperands(context, @operator.OperandCount); + foreach (var operand in operands) + { + node.AddParameter(operand); + } + + // Push the node to the operand stack + context.Operands.Push(node); + } + + // ")" function call + private static void FlushTopEndParameters(ParseContext context) + { + // Pop the operator + var @operator = PopOperator(context, TokenKind.EndParameters); // ")" function call + + // Sanity check top operator is the current token + if (!Object.ReferenceEquals(@operator, context.Token)) + { + throw new InvalidOperationException("Expected the operator to be the current token"); + } + + var function = default(Function); + + // No parameters + if (context.LastToken.Kind == TokenKind.StartParameters) + { + // Node already exists on the operand stack + function = (Function)context.Operands.Peek(); + } + // Has parameters + else + { + // Pop the operands + var parameterCount = 1; + while (context.Operators.Peek().Kind == TokenKind.Separator) + { + parameterCount++; + context.Operators.Pop(); + } + var functionOperands = PopOperands(context, parameterCount); + + // Node already exists on the operand stack + function = (Function)context.Operands.Peek(); + + // Add the operands to the node + foreach (var operand in functionOperands) + { + function.AddParameter(operand); + } + } + + // Pop the "(" operator too + @operator = PopOperator(context, 
TokenKind.StartParameters); + + // Check min/max parameter count + TryGetFunctionInfo(context, function.Name, out var functionInfo); + if (functionInfo == null && context.AllowUnknownKeywords) + { + // Don't check min/max + } + else if (function.Parameters.Count < functionInfo.MinParameters) + { + throw new ParseException(ParseExceptionKind.TooFewParameters, token: @operator, expression: context.Expression); + } + else if (function.Parameters.Count > functionInfo.MaxParameters) + { + throw new ParseException(ParseExceptionKind.TooManyParameters, token: @operator, expression: context.Expression); + } + } + + /// + /// Pops N operands from the operand stack. The operands are returned + /// in their natural listed order, i.e. not last-in-first-out. + /// + private static List PopOperands( + ParseContext context, + Int32 count) + { + var result = new List(); + while (count-- > 0) + { + result.Add(context.Operands.Pop()); + } + + result.Reverse(); + return result; + } + + /// + /// Pops an operator and asserts it is the expected kind. + /// + private static Token PopOperator( + ParseContext context, + TokenKind expected) + { + var token = context.Operators.Pop(); + if (token.Kind != expected) + { + throw new NotSupportedException($"Expected operator '{expected}' to be popped. Actual '{token.Kind}'."); + } + return token; + } + + /// + /// Checks the max depth of the expression tree + /// + private static void CheckMaxDepth( + ParseContext context, + ExpressionNode node, + Int32 depth = 1) + { + if (depth > ExpressionConstants.MaxDepth) + { + throw new ParseException(ParseExceptionKind.ExceededMaxDepth, token: null, expression: context.Expression); + } + + if (node is Container container) + { + foreach (var parameter in container.Parameters) + { + CheckMaxDepth(context, parameter, depth + 1); + } + } + } + + private static Boolean TryGetFunctionInfo( + ParseContext context, + String name, + out IFunctionInfo functionInfo) + { + return ExpressionConstants.WellKnownFunctions.TryGetValue(name, out functionInfo) || + context.ExtensionFunctions.TryGetValue(name, out functionInfo); + } + + private sealed class ParseContext + { + public Boolean AllowUnknownKeywords; + public readonly String Expression; + public readonly Dictionary ExtensionFunctions = new Dictionary(StringComparer.OrdinalIgnoreCase); + public readonly Dictionary ExtensionNamedValues = new Dictionary(StringComparer.OrdinalIgnoreCase); + public readonly LexicalAnalyzer LexicalAnalyzer; + public readonly Stack Operands = new Stack(); + public readonly Stack Operators = new Stack(); + public readonly ITraceWriter Trace; + public Token Token; + public Token LastToken; + + public ParseContext( + String expression, + ITraceWriter trace, + IEnumerable namedValues, + IEnumerable functions, + Boolean allowUnknownKeywords = false) + { + Expression = expression ?? String.Empty; + if (Expression.Length > ExpressionConstants.MaxLength) + { + throw new ParseException(ParseExceptionKind.ExceededMaxLength, token: null, expression: Expression); + } + + Trace = trace ?? new NoOperationTraceWriter(); + foreach (var namedValueInfo in (namedValues ?? new INamedValueInfo[0])) + { + ExtensionNamedValues.Add(namedValueInfo.Name, namedValueInfo); + } + + foreach (var functionInfo in (functions ?? 
new IFunctionInfo[0])) + { + ExtensionFunctions.Add(functionInfo.Name, functionInfo); + } + + LexicalAnalyzer = new LexicalAnalyzer(Expression); + AllowUnknownKeywords = allowUnknownKeywords; + } + + private class NoOperationTraceWriter : ITraceWriter + { + public void Info(String message) + { + } + + public void Verbose(String message) + { + } + } + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/FunctionInfo.cs b/src/Sdk/DTExpressions2/Expressions2/FunctionInfo.cs new file mode 100644 index 00000000000..0fd0bf04cb7 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/FunctionInfo.cs @@ -0,0 +1,29 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions2.Sdk; + +namespace GitHub.DistributedTask.Expressions2 +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class FunctionInfo : IFunctionInfo + where T : Function, new() + { + public FunctionInfo(String name, Int32 minParameters, Int32 maxParameters) + { + Name = name; + MinParameters = minParameters; + MaxParameters = maxParameters; + } + + public String Name { get; } + + public Int32 MinParameters { get; } + + public Int32 MaxParameters { get; } + + public Function CreateNode() + { + return new T(); + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/IExpressionNode.cs b/src/Sdk/DTExpressions2/Expressions2/IExpressionNode.cs new file mode 100644 index 00000000000..57a9e0cf90b --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/IExpressionNode.cs @@ -0,0 +1,25 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Logging; + +namespace GitHub.DistributedTask.Expressions2 +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IExpressionNode + { + /// + /// Evaluates the expression and returns the result, wrapped in a helper + /// for converting, comparing, and traversing objects. + /// + /// Optional trace writer + /// Optional secret masker + /// State object for custom evaluation function nodes and custom named-value nodes + /// Evaluation options + EvaluationResult Evaluate( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state, + EvaluationOptions options); + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/IExpressionNodeExtensions.cs b/src/Sdk/DTExpressions2/Expressions2/IExpressionNodeExtensions.cs new file mode 100644 index 00000000000..ab64596dabc --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/IExpressionNodeExtensions.cs @@ -0,0 +1,237 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using GitHub.DistributedTask.Logging; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.DistributedTask.Expressions2.Sdk.Operators; + +namespace GitHub.DistributedTask.Expressions2 +{ + [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)] + public static class IExpressionNodeExtensions + { + /// + /// Returns the node and all descendant nodes + /// + public static IEnumerable Traverse(this IExpressionNode node) + { + yield return node; + + if (node is Container container && container.Parameters.Count > 0) + { + foreach (var parameter in container.Parameters) + { + foreach (var descendant in parameter.Traverse()) + { + yield return descendant; + } + } + } + } + + /// + /// Checks whether specific contexts or sub-properties of contexts are referenced. + /// If a conclusive determination cannot be made, then the pattern is considered matched. 
+ /// For example, the expression "toJson(github)" matches the pattern "github.event" because + /// the value is passed to a function. Not enough information is known to determine whether + /// the function requires the sub-property. Therefore, it is assumed that it may. + /// + /// Wildcards are supported in the pattern, and are treated as matching any literal. + /// For example, the expression "needs.my-job.outputs.my-output" matches the pattern "needs.*.outputs". + /// + public static Boolean[] CheckReferencesContext( + this IExpressionNode tree, + params String[] patterns) + { + var result = new Boolean[patterns.Length]; + + var segmentedPatterns = default(Stack[]); + + // Walk the tree + var stack = new Stack(); + stack.Push(tree); + while (stack.Count > 0) + { + var node = stack.Pop(); + + // Attempt to match a named-value or index operator. + // Note, do not push children of the index operator. + if (node is NamedValue || node is Index) + { + // Lazy initialize the pattern segments + if (segmentedPatterns is null) + { + segmentedPatterns = new Stack[patterns.Length]; + var parser = new ExpressionParser(); + for (var i = 0; i < patterns.Length; i++) + { + var pattern = patterns[i]; + var patternTree = parser.ValidateSyntax(pattern, null); + var patternSegments = GetMatchSegments(patternTree); + if (patternSegments.Count == 0) + { + throw new InvalidOperationException($"Invalid context-match-pattern '{pattern}'"); + } + segmentedPatterns[i] = patternSegments; + } + } + + // Match + Match(node, segmentedPatterns, result); + } + // Push children of any other container node. + else if (node is Container container && container.Parameters.Count > 0) + { + foreach (var child in container.Parameters) + { + stack.Push(child); + } + } + } + + return result; + } + + private static void Match( + IExpressionNode node, + Stack[] patterns, + Boolean[] result) + { + var nodeSegments = GetMatchSegments(node); + + if (nodeSegments.Count == 0) + { + return; + } + + var nodeNamedValue = nodeSegments.Peek() as NamedValue; + var originalNodeSegments = nodeSegments; + + for (var i = 0; i < patterns.Length; i++) + { + var patternSegments = patterns[i]; + var patternNamedValue = patternSegments.Peek() as NamedValue; + + // Compare the named-value + if (String.Equals(nodeNamedValue.Name, patternNamedValue.Name, StringComparison.OrdinalIgnoreCase)) + { + // Clone the stacks before mutating + nodeSegments = new Stack(originalNodeSegments.Reverse()); + nodeSegments.Pop(); + patternSegments = new Stack(patternSegments.Reverse()); + patternSegments.Pop(); + + // Walk the stacks + while (true) + { + // Every pattern segment was matched + if (patternSegments.Count == 0) + { + result[i] = true; + break; + } + // Every node segment was matched. Treat the pattern as matched. There is not + // enough information to determine whether the property is required; assume it is. + // For example, consider the pattern "github.event" and the expression "toJson(github)". + // In this example the function requires the full structure of the named-value. + else if (nodeSegments.Count == 0) + { + result[i] = true; + break; + } + + var nodeSegment = nodeSegments.Pop(); + var patternSegment = patternSegments.Pop(); + + // The behavior of a wildcard varies depending on whether the left operand + // is an array or an object. For simplicity, treat the pattern as matched. 
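+ // For example (illustrative), the expression needs[format('{0}', 'build')].outputs produces a wildcard + // segment for the computed index, so a pattern such as 'needs.*.outputs' is reported as referenced.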
+ if (nodeSegment is Wildcard) + { + result[i] = true; + break; + } + // Treat a wildcard pattern segment as matching any literal segment + else if (patternSegment is Wildcard) + { + continue; + } + + // Convert literals to string and compare + var nodeLiteral = nodeSegment as Literal; + var nodeEvaluationResult = EvaluationResult.CreateIntermediateResult(null, nodeLiteral.Value); + var nodeString = nodeEvaluationResult.ConvertToString(); + var patternLiteral = patternSegment as Literal; + var patternEvaluationResult = EvaluationResult.CreateIntermediateResult(null, patternLiteral.Value); + var patternString = patternEvaluationResult.ConvertToString(); + if (String.Equals(nodeString, patternString, StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + // Convert to number and compare + var nodeNumber = nodeEvaluationResult.ConvertToNumber(); + if (!Double.IsNaN(nodeNumber) && nodeNumber >= 0d && nodeNumber <= (Double)Int32.MaxValue) + { + var patternNumber = patternEvaluationResult.ConvertToNumber(); + if (!Double.IsNaN(patternNumber) && patternNumber >= 0 && patternNumber <= (Double)Int32.MaxValue) + { + nodeNumber = Math.Floor(nodeNumber); + patternNumber = Math.Floor(patternNumber); + if (nodeNumber == patternNumber) + { + continue; + } + } + } + + // Not matched + break; + } + } + } + } + + private static Stack GetMatchSegments(IExpressionNode node) + { + var result = new Stack(); + + // Node is a named-value + if (node is NamedValue) + { + result.Push(node); + } + // Node is an index + else if (node is Index index) + { + while (true) + { + // Push parameter 1. Treat anything other than literal as a wildcard. + var parameter1 = index.Parameters[1]; + result.Push(parameter1 is Literal ? parameter1 : new Wildcard()); + + var parameter0 = index.Parameters[0]; + + // Parameter 0 is a named-value + if (parameter0 is NamedValue) + { + result.Push(parameter0); + break; + } + // Parameter 0 is an index + else if (parameter0 is Index index2) + { + index = index2; + } + // Otherwise clear + else + { + result.Clear(); + break; + } + } + } + + return result; + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/IFunctionInfo.cs b/src/Sdk/DTExpressions2/Expressions2/IFunctionInfo.cs new file mode 100644 index 00000000000..e3c5f1a573a --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/IFunctionInfo.cs @@ -0,0 +1,15 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions2.Sdk; + +namespace GitHub.DistributedTask.Expressions2 +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IFunctionInfo + { + String Name { get; } + Int32 MinParameters { get; } + Int32 MaxParameters { get; } + Function CreateNode(); + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/INamedValueInfo.cs b/src/Sdk/DTExpressions2/Expressions2/INamedValueInfo.cs new file mode 100644 index 00000000000..ccec712fd1b --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/INamedValueInfo.cs @@ -0,0 +1,13 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions2.Sdk; + +namespace GitHub.DistributedTask.Expressions2 +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface INamedValueInfo + { + String Name { get; } + NamedValue CreateNode(); + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/ITraceWriter.cs b/src/Sdk/DTExpressions2/Expressions2/ITraceWriter.cs new file mode 100644 index 00000000000..c9724ab4094 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/ITraceWriter.cs @@ -0,0 +1,12 @@ +using System; +using 
System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2 +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface ITraceWriter + { + void Info(String message); + void Verbose(String message); + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/NamedValueInfo.cs b/src/Sdk/DTExpressions2/Expressions2/NamedValueInfo.cs new file mode 100644 index 00000000000..8c657172e86 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/NamedValueInfo.cs @@ -0,0 +1,23 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions2.Sdk; + +namespace GitHub.DistributedTask.Expressions2 +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class NamedValueInfo : INamedValueInfo + where T : NamedValue, new() + { + public NamedValueInfo(String name) + { + Name = name; + } + + public String Name { get; } + + public NamedValue CreateNode() + { + return new T(); + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/ParseException.cs b/src/Sdk/DTExpressions2/Expressions2/ParseException.cs new file mode 100644 index 00000000000..19529a4be94 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/ParseException.cs @@ -0,0 +1,68 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions2.Tokens; + +namespace GitHub.DistributedTask.Expressions2 +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ParseException : ExpressionException + { + internal ParseException(ParseExceptionKind kind, Token token, String expression) + : base(secretMasker: null, message: String.Empty) + { + Expression = expression; + Kind = kind; + RawToken = token?.RawValue; + TokenIndex = token?.Index ?? 0; + String description; + switch (kind) + { + case ParseExceptionKind.ExceededMaxDepth: + description = $"Exceeded max expression depth {ExpressionConstants.MaxDepth}"; + break; + case ParseExceptionKind.ExceededMaxLength: + description = $"Exceeded max expression length {ExpressionConstants.MaxLength}"; + break; + case ParseExceptionKind.TooFewParameters: + description = "Too few parameters supplied"; + break; + case ParseExceptionKind.TooManyParameters: + description = "Too many parameters supplied"; + break; + case ParseExceptionKind.UnexpectedEndOfExpression: + description = "Unexpected end of expression"; + break; + case ParseExceptionKind.UnexpectedSymbol: + description = "Unexpected symbol"; + break; + case ParseExceptionKind.UnrecognizedFunction: + description = "Unrecognized function"; + break; + case ParseExceptionKind.UnrecognizedNamedValue: + description = "Unrecognized named-value"; + break; + default: // Should never reach here. + throw new Exception($"Unexpected parse exception kind '{kind}'."); + } + + if (token == null) + { + Message = description; + } + else + { + Message = $"{description}: '{RawToken}'. 
Located at position {TokenIndex + 1} within expression: {Expression}"; + } + } + + internal String Expression { get; } + + internal ParseExceptionKind Kind { get; } + + internal String RawToken { get; } + + internal Int32 TokenIndex { get; } + + public sealed override String Message { get; } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/ParseExceptionKind.cs b/src/Sdk/DTExpressions2/Expressions2/ParseExceptionKind.cs new file mode 100644 index 00000000000..4e4fc6a3d6c --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/ParseExceptionKind.cs @@ -0,0 +1,14 @@ +namespace GitHub.DistributedTask.Expressions2 +{ + internal enum ParseExceptionKind + { + ExceededMaxDepth, + ExceededMaxLength, + TooFewParameters, + TooManyParameters, + UnexpectedEndOfExpression, + UnexpectedSymbol, + UnrecognizedFunction, + UnrecognizedNamedValue, + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Container.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Container.cs new file mode 100644 index 00000000000..91e6552332e --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Container.cs @@ -0,0 +1,20 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class Container : ExpressionNode + { + public IReadOnlyList Parameters => m_parameters.AsReadOnly(); + + public void AddParameter(ExpressionNode node) + { + m_parameters.Add(node); + node.Container = this; + } + + private readonly List m_parameters = new List(); + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/EvaluationContext.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/EvaluationContext.cs new file mode 100644 index 00000000000..e58ee27f174 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/EvaluationContext.cs @@ -0,0 +1,78 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Logging; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class EvaluationContext + { + internal EvaluationContext( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state, + EvaluationOptions options, + ExpressionNode node) + { + ArgumentUtility.CheckForNull(trace, nameof(trace)); + ArgumentUtility.CheckForNull(secretMasker, nameof(secretMasker)); + Trace = trace; + SecretMasker = secretMasker; + State = state; + + // Copy the options + options = new EvaluationOptions(copy: options); + if (options.MaxMemory == 0) + { + // Set a reasonable default max memory + options.MaxMemory = 1048576; // 1 mb + } + Options = options; + Memory = new EvaluationMemory(options.MaxMemory, node); + + m_traceResults = new Dictionary(); + m_traceMemory = new MemoryCounter(null, options.MaxMemory); + } + + public ITraceWriter Trace { get; } + + public ISecretMasker SecretMasker { get; } + + public Object State { get; } + + internal EvaluationMemory Memory { get; } + + internal EvaluationOptions Options { get; } + + internal void SetTraceResult( + ExpressionNode node, + EvaluationResult result) + { + // Remove if previously added. This typically should not happen. This could happen + // due to a badly authored function. So we'll handle it and track memory correctly. 
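+ // (Illustrative) a custom Function node that evaluates the same child twice would reach this branch, + // because both evaluations store a trace result against the same node instance.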
+ if (m_traceResults.TryGetValue(node, out String oldValue)) + { + m_traceMemory.Remove(oldValue); + m_traceResults.Remove(node); + } + + // Check max memory + String value = ExpressionUtility.FormatValue(SecretMasker, result); + if (m_traceMemory.TryAdd(value)) + { + // Store the result + m_traceResults[node] = value; + } + } + + internal Boolean TryGetTraceResult(ExpressionNode node, out String value) + { + return m_traceResults.TryGetValue(node, out value); + } + + private readonly Dictionary m_traceResults = new Dictionary(); + private readonly MemoryCounter m_traceMemory; + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/EvaluationMemory.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/EvaluationMemory.cs new file mode 100644 index 00000000000..0688596c773 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/EvaluationMemory.cs @@ -0,0 +1,112 @@ +using System; +using System.Collections.Generic; +using ExpressionResources = GitHub.DistributedTask.Expressions.ExpressionResources; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + /// + /// This is an internal class only. + /// + /// This class is used to track current memory consumption + /// across the entire expression evaluation. + /// + internal sealed class EvaluationMemory + { + internal EvaluationMemory( + Int32 maxBytes, + ExpressionNode node) + { + m_maxAmount = maxBytes; + m_node = node; + } + + internal void AddAmount( + Int32 depth, + Int32 bytes, + Boolean trimDepth = false) + { + // Trim deeper depths + if (trimDepth) + { + while (m_maxActiveDepth > depth) + { + var amount = m_depths[m_maxActiveDepth]; + + if (amount > 0) + { + // Sanity check + if (amount > m_totalAmount) + { + throw new InvalidOperationException("Bytes to subtract exceeds total bytes"); + } + + // Subtract from the total + checked + { + m_totalAmount -= amount; + } + + // Reset the amount + m_depths[m_maxActiveDepth] = 0; + } + + m_maxActiveDepth--; + } + } + + // Grow the depths + if (depth > m_maxActiveDepth) + { + // Grow the list + while (m_depths.Count <= depth) + { + m_depths.Add(0); + } + + // Adjust the max active depth + m_maxActiveDepth = depth; + } + + checked + { + // Add to the depth + m_depths[depth] += bytes; + + // Add to the total + m_totalAmount += bytes; + } + + // Check max + if (m_totalAmount > m_maxAmount) + { + throw new InvalidOperationException(ExpressionResources.ExceededAllowedMemory(m_node?.ConvertToExpression())); + } + } + + internal static Int32 CalculateBytes(Object obj) + { + if (obj is String str) + { + // This measurement doesn't have to be perfect + // https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/ + + checked + { + return c_stringBaseOverhead + ((str?.Length ?? 
0) * sizeof(Char)); + } + } + else + { + return c_minObjectSize; + } + } + + private const Int32 c_minObjectSize = 24; + private const Int32 c_stringBaseOverhead = 26; + private readonly List<Int32> m_depths = new List<Int32>(); + private readonly Int32 m_maxAmount; + private readonly ExpressionNode m_node; + private Int32 m_maxActiveDepth = -1; + private Int32 m_totalAmount; + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/EvaluationTraceWriter.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/EvaluationTraceWriter.cs new file mode 100644 index 00000000000..87517cdeb8a --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/EvaluationTraceWriter.cs @@ -0,0 +1,37 @@ +using System; +using GitHub.DistributedTask.Logging; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + internal sealed class EvaluationTraceWriter : ITraceWriter + { + public EvaluationTraceWriter(ITraceWriter trace, ISecretMasker secretMasker) + { + ArgumentUtility.CheckForNull(secretMasker, nameof(secretMasker)); + m_trace = trace; + m_secretMasker = secretMasker; + } + + public void Info(String message) + { + if (m_trace != null) + { + message = m_secretMasker.MaskSecrets(message); + m_trace.Info(message); + } + } + + public void Verbose(String message) + { + if (m_trace != null) + { + message = m_secretMasker.MaskSecrets(message); + m_trace.Verbose(message); + } + } + + private readonly ISecretMasker m_secretMasker; + private readonly ITraceWriter m_trace; + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/ExpressionNode.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/ExpressionNode.cs new file mode 100644 index 00000000000..28e7b37ecde --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/ExpressionNode.cs @@ -0,0 +1,192 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Logging; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class ExpressionNode : IExpressionNode + { + internal Container Container { get; set; } + + internal Int32 Level { get; private set; } + + /// + /// The name is used for tracing. Normally the parser will set the name. However if a node + /// is added manually, then the name may not be set and will fall back to the type name. + /// + protected internal String Name + { + get + { + return !String.IsNullOrEmpty(m_name) ? m_name : this.GetType().Name; + } + + set + { + m_name = value; + } + } + + /// + /// Indicates whether the evaluation result should be stored on the context and used + /// when the realized result is traced. + /// + protected abstract Boolean TraceFullyRealized { get; } + + /// + /// IExpressionNode entry point. + /// + EvaluationResult IExpressionNode.Evaluate( + ITraceWriter trace, + ISecretMasker secretMasker, + Object state, + EvaluationOptions options) + { + if (Container != null) + { + // Do not localize. This is an SDK consumer error. + throw new NotSupportedException($"Expected {nameof(IExpressionNode)}.{nameof(Evaluate)} to be called on root node only."); + } + + + var originalSecretMasker = secretMasker; + try + { + // Evaluate + secretMasker = secretMasker?.Clone() ??
new SecretMasker(); + trace = new EvaluationTraceWriter(trace, secretMasker); + var context = new EvaluationContext(trace, secretMasker, state, options, this); + trace.Info($"Evaluating: {ConvertToExpression()}"); + var result = Evaluate(context); + + // Trace the result + TraceTreeResult(context, result.Value, result.Kind); + + return result; + } + finally + { + if (secretMasker != null && secretMasker != originalSecretMasker) + { + (secretMasker as IDisposable)?.Dispose(); + secretMasker = null; + } + } + } + + /// + /// This function is intended only for ExpressionNode authors to call. The EvaluationContext + /// caches result-state specific to the evaluation instance. + /// + public EvaluationResult Evaluate(EvaluationContext context) + { + // Evaluate + Level = Container == null ? 0 : Container.Level + 1; + TraceVerbose(context, Level, $"Evaluating {Name}:"); + var coreResult = EvaluateCore(context, out ResultMemory coreMemory); + + if (coreMemory == null) + { + coreMemory = new ResultMemory(); + } + + // Convert to canonical value + var val = ExpressionUtility.ConvertToCanonicalValue(coreResult, out ValueKind kind, out Object raw); + + // The depth can be safely trimmed when the total size of the core result is known, + // or when the total size of the core result can easily be determined. + var trimDepth = coreMemory.IsTotal || (Object.ReferenceEquals(raw, null) && ExpressionUtility.IsPrimitive(kind)); + + // Account for the memory overhead of the core result + var coreBytes = coreMemory.Bytes ?? EvaluationMemory.CalculateBytes(raw ?? val); + context.Memory.AddAmount(Level, coreBytes, trimDepth); + + // Account for the memory overhead of the conversion result + if (!Object.ReferenceEquals(raw, null)) + { + var conversionBytes = EvaluationMemory.CalculateBytes(val); + context.Memory.AddAmount(Level, conversionBytes); + } + + var result = new EvaluationResult(context, Level, val, kind, raw); + + // Store the trace result + if (this.TraceFullyRealized) + { + context.SetTraceResult(this, result); + } + + return result; + } + + internal abstract String ConvertToExpression(); + + internal abstract String ConvertToRealizedExpression(EvaluationContext context); + + /// + /// Evaluates the node + /// + /// The current expression context + /// + /// Helps determine how much memory is being consumed across the evaluation of the expression. + /// + protected abstract Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory); + + protected MemoryCounter CreateMemoryCounter(EvaluationContext context) + { + return new MemoryCounter(this, context.Options.MaxMemory); + } + + private void TraceTreeResult( + EvaluationContext context, + Object result, + ValueKind kind) + { + // Get the realized expression + String realizedExpression = ConvertToRealizedExpression(context); + + // Format the result + String traceValue = ExpressionUtility.FormatValue(context.SecretMasker, result, kind); + + // Only trace the realized expression if it is meaningfully different + if (!String.Equals(realizedExpression, traceValue, StringComparison.Ordinal)) + { + if (kind == ValueKind.Number && + String.Equals(realizedExpression, $"'{traceValue}'", StringComparison.Ordinal)) + { + // Don't bother tracing the realized expression when the result is a number and the + // realized expresion is a precisely matching string. 
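+ // For example (illustrative): a result of 2 whose realized expression is '2' adds no information.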
+ } + else + { + context.Trace.Info($"Expanded: {realizedExpression}"); + } + } + + // Always trace the result + context.Trace.Info($"Result: {traceValue}"); + } + + private static void TraceVerbose( + EvaluationContext context, + Int32 level, + String message) + { + context.Trace.Verbose(String.Empty.PadLeft(level * 2, '.') + (message ?? String.Empty)); + } + + private static readonly ValueKind[] s_simpleKinds = new[] + { + ValueKind.Boolean, + ValueKind.Null, + ValueKind.Number, + ValueKind.String, + }; + + private String m_name; + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/ExpressionUtility.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/ExpressionUtility.cs new file mode 100644 index 00000000000..f4a3d68d5e2 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/ExpressionUtility.cs @@ -0,0 +1,265 @@ +using System; +using System.ComponentModel; +using System.Globalization; +using System.Linq; +using System.Reflection; +using GitHub.DistributedTask.Logging; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + internal static class ExpressionUtility + { + internal static Object ConvertToCanonicalValue( + Object val, + out ValueKind kind, + out Object raw) + { + raw = null; + + if (Object.ReferenceEquals(val, null)) + { + kind = ValueKind.Null; + return null; + } + else if (val is Boolean) + { + kind = ValueKind.Boolean; + return val; + } + else if (val is Double) + { + kind = ValueKind.Number; + return val; + } + else if (val is String) + { + kind = ValueKind.String; + return val; + } + else if (val is INull n) + { + kind = ValueKind.Null; + raw = val; + return null; + } + else if (val is IBoolean boolean) + { + kind = ValueKind.Boolean; + raw = val; + return boolean.GetBoolean(); + } + else if (val is INumber number) + { + kind = ValueKind.Number; + raw = val; + return number.GetNumber(); + } + else if (val is IString str) + { + kind = ValueKind.String; + raw = val; + return str.GetString(); + } + else if (val is IReadOnlyObject) + { + kind = ValueKind.Object; + return val; + } + else if (val is IReadOnlyArray) + { + kind = ValueKind.Array; + return val; + } + else if (!val.GetType().GetTypeInfo().IsClass) + { + if (val is Decimal || val is Byte || val is SByte || val is Int16 || val is UInt16 || val is Int32 || val is UInt32 || val is Int64 || val is UInt64 || val is Single) + { + kind = ValueKind.Number; + return Convert.ToDouble(val); + } + else if (val is Enum) + { + var strVal = String.Format(CultureInfo.InvariantCulture, "{0:G}", val); + if (Double.TryParse(strVal, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out Double doubleValue)) + { + kind = ValueKind.Number; + return doubleValue; + } + + kind = ValueKind.String; + return strVal; + } + } + + kind = ValueKind.Object; + return val; + } + + internal static String FormatValue( + ISecretMasker secretMasker, + EvaluationResult evaluationResult) + { + return FormatValue(secretMasker, evaluationResult.Value, evaluationResult.Kind); + } + + internal static String FormatValue( + ISecretMasker secretMasker, + Object value, + ValueKind kind) + { + switch (kind) + { + case ValueKind.Null: + return ExpressionConstants.Null; + + case ValueKind.Boolean: + return ((Boolean)value) ? ExpressionConstants.True : ExpressionConstants.False; + + case ValueKind.Number: + var strNumber = ((Double)value).ToString(ExpressionConstants.NumberFormat, CultureInfo.InvariantCulture); + return secretMasker != null ? 
secretMasker.MaskSecrets(strNumber) : strNumber; + + case ValueKind.String: + // Mask secrets before string-escaping. + var strValue = secretMasker != null ? secretMasker.MaskSecrets(value as String) : value as String; + return $"'{StringEscape(strValue)}'"; + + case ValueKind.Array: + case ValueKind.Object: + return kind.ToString(); + + default: // Should never reach here. + throw new NotSupportedException($"Unable to convert to realized expression. Unexpected value kind: {kind}"); + } + } + + internal static bool IsLegalKeyword(String str) + { + if (String.IsNullOrEmpty(str)) + { + return false; + } + + var first = str[0]; + if ((first >= 'a' && first <= 'z') || + (first >= 'A' && first <= 'Z') || + first == '_') + { + for (var i = 1; i < str.Length; i++) + { + var c = str[i]; + if ((c >= 'a' && c <= 'z') || + (c >= 'A' && c <= 'Z') || + (c >= '0' && c <= '9') || + c == '_' || + c == '-') + { + // OK + } + else + { + return false; + } + } + + return true; + } + else + { + return false; + } + + } + + internal static Boolean IsPrimitive(ValueKind kind) + { + switch (kind) + { + case ValueKind.Null: + case ValueKind.Boolean: + case ValueKind.Number: + case ValueKind.String: + return true; + default: + return false; + } + } + + /// + /// The rules here attempt to follow Javascript rules for coercing a string into a number + /// for comparison. That is, the Number() function in Javascript. + /// + internal static Double ParseNumber(String str) + { + // Trim + str = str?.Trim() ?? String.Empty; + + // Empty + if (String.IsNullOrEmpty(str)) + { + return 0d; + } + // Try parse + else if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var value)) + { + return value; + } + // Check for 0x[0-9a-fA-F]+ + else if (str[0] == '0' && + str.Length > 2 && + str[1] == 'x' && + str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F'))) + { + // Try parse + if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integer)) + { + return (Double)integer; + } + + // Otherwise exceeds range + } + // Check for 0o[0-9]+ + else if (str[0] == '0' && + str.Length > 2 && + str[1] == 'o' && + str.Skip(2).All(x => x >= '0' && x <= '7')) + { + // Try parse + var integer = default(Int32?); + try + { + integer = Convert.ToInt32(str.Substring(2), 8); + } + // Otherwise exceeds range + catch (Exception) + { + } + + // Success + if (integer != null) + { + return (Double)integer.Value; + } + } + // Infinity + else if (String.Equals(str, ExpressionConstants.Infinity, StringComparison.Ordinal)) + { + return Double.PositiveInfinity; + } + // -Infinity + else if (String.Equals(str, ExpressionConstants.NegativeInfinity, StringComparison.Ordinal)) + { + return Double.NegativeInfinity; + } + + // Otherwise NaN + return Double.NaN; + } + + internal static String StringEscape(String value) + { + return String.IsNullOrEmpty(value) ? 
String.Empty : value.Replace("'", "''"); + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Function.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Function.cs new file mode 100644 index 00000000000..9001ff2309e --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Function.cs @@ -0,0 +1,45 @@ +using System; +using System.ComponentModel; +using System.Globalization; +using System.Linq; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class Function : Container + { + /// + /// Generally this should not be overridden. True indicates the result of the node is traced as part of the "expanded" + /// (i.e. "realized") trace information. Otherwise the node expression is printed, and parameters to the node may or + /// may not be fully realized - depending on each respective parameter's trace-fully-realized setting. + /// + /// The purpose is so the end user can understand how their expression expanded at run time. For example, consider + /// the expression: eq(variables.publish, 'true'). The runtime-expanded expression may be: eq('true', 'true') + /// + protected override Boolean TraceFullyRealized => true; + + internal sealed override String ConvertToExpression() + { + return String.Format( + CultureInfo.InvariantCulture, + "{0}({1})", + Name, + String.Join(", ", Parameters.Select(x => x.ConvertToExpression()))); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return String.Format( + CultureInfo.InvariantCulture, + "{0}({1})", + Name, + String.Join(", ", Parameters.Select(x => x.ConvertToRealizedExpression(context)))); + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/Contains.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/Contains.cs new file mode 100644 index 00000000000..67cfbb89e4e --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/Contains.cs @@ -0,0 +1,44 @@ +using System; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Functions +{ + internal sealed class Contains : Function + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var left = Parameters[0].Evaluate(context); + if (left.IsPrimitive) + { + var leftString = left.ConvertToString(); + + var right = Parameters[1].Evaluate(context); + if (right.IsPrimitive) + { + var rightString = right.ConvertToString(); + return leftString.IndexOf(rightString, StringComparison.OrdinalIgnoreCase) >= 0; + } + } + else if (left.TryGetCollectionInterface(out var collection) && + collection is IReadOnlyArray array && + array.Count > 0) + { + var right = Parameters[1].Evaluate(context); + foreach (var item in array) + { + var itemResult = EvaluationResult.CreateIntermediateResult(context, item); + if (right.AbstractEqual(itemResult)) + { + return true; + } + } + } + + return false; + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/EndsWith.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/EndsWith.cs new file mode 100644 index 00000000000..10efd1e8658 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/EndsWith.cs @@ -0,0 +1,30 @@ +using System; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Functions +{ + internal sealed class 
EndsWith : Function + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var left = Parameters[0].Evaluate(context); + if (left.IsPrimitive) + { + var leftString = left.ConvertToString(); + + var right = Parameters[1].Evaluate(context); + if (right.IsPrimitive) + { + var rightString = right.ConvertToString(); + return leftString.EndsWith(rightString, StringComparison.OrdinalIgnoreCase); + } + } + + return false; + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/Format.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/Format.cs new file mode 100644 index 00000000000..40e5eab9476 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/Format.cs @@ -0,0 +1,298 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Text; +using ExpressionResources = GitHub.DistributedTask.Expressions.ExpressionResources; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Functions +{ + internal sealed class Format : Function + { + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var format = Parameters[0].Evaluate(context).ConvertToString(); + var index = 0; + var result = new FormatResultBuilder(this, context, CreateMemoryCounter(context)); + while (index < format.Length) + { + var lbrace = format.IndexOf('{', index); + var rbrace = format.IndexOf('}', index); + + // Left brace + if (lbrace >= 0 && (rbrace < 0 || rbrace > lbrace)) + { + // Escaped left brace + if (SafeCharAt(format, lbrace + 1) == '{') + { + result.Append(format.Substring(index, lbrace - index + 1)); + index = lbrace + 2; + } + // Left brace, number, optional format specifiers, right brace + else if (rbrace > lbrace + 1 && + ReadArgIndex(format, lbrace + 1, out Byte argIndex, out Int32 endArgIndex) && + ReadFormatSpecifiers(format, endArgIndex + 1, out String formatSpecifiers, out rbrace)) + { + // Check parameter count + if (argIndex > Parameters.Count - 2) + { + throw new FormatException(ExpressionResources.InvalidFormatArgIndex(format)); + } + + // Append the portion before the left brace + if (lbrace > index) + { + result.Append(format.Substring(index, lbrace - index)); + } + + // Append the arg + result.Append(argIndex, formatSpecifiers); + index = rbrace + 1; + } + else + { + throw new FormatException(ExpressionResources.InvalidFormatString(format)); + } + } + // Right brace + else if (rbrace >= 0) + { + // Escaped right brace + if (SafeCharAt(format, rbrace + 1) == '}') + { + result.Append(format.Substring(index, rbrace - index + 1)); + index = rbrace + 2; + } + else + { + throw new FormatException(ExpressionResources.InvalidFormatString(format)); + } + } + // Last segment + else + { + result.Append(format.Substring(index)); + break; + } + } + + return result.ToString(); + } + + private Boolean ReadArgIndex( + String str, + Int32 startIndex, + out Byte result, + out Int32 endIndex) + { + // Count the number of digits + var length = 0; + while (Char.IsDigit(SafeCharAt(str, startIndex + length))) + { + length++; + } + + // Validate at least one digit + if (length < 1) + { + result = default; + endIndex = default; + return false; + } + + // Parse the number + endIndex = startIndex + length - 1; + return Byte.TryParse(str.Substring(startIndex, length), NumberStyles.None, 
CultureInfo.InvariantCulture, out result); + } + + private Boolean ReadFormatSpecifiers( + String str, + Int32 startIndex, + out String result, + out Int32 rbrace) + { + // No format specifiers + var c = SafeCharAt(str, startIndex); + if (c == '}') + { + result = String.Empty; + rbrace = startIndex; + return true; + } + + // Validate starts with ":" + if (c != ':') + { + result = default; + rbrace = default; + return false; + } + + // Read the specifiers + var specifiers = new StringBuilder(); + var index = startIndex + 1; + while (true) + { + // Validate not the end of the string + if (index >= str.Length) + { + result = default; + rbrace = default; + return false; + } + + c = str[index]; + + // Not right-brace + if (c != '}') + { + specifiers.Append(c); + index++; + } + // Escaped right-brace + else if (SafeCharAt(str, index + 1) == '}') + { + specifiers.Append('}'); + index += 2; + } + // Closing right-brace + else + { + result = specifiers.ToString(); + rbrace = index; + return true; + } + } + } + + private Char SafeCharAt( + String str, + Int32 index) + { + if (str.Length > index) + { + return str[index]; + } + + return '\0'; + } + + private sealed class FormatResultBuilder + { + internal FormatResultBuilder( + Format node, + EvaluationContext context, + MemoryCounter counter) + { + m_node = node; + m_context = context; + m_counter = counter; + m_cache = new ArgValue[node.Parameters.Count - 1]; + } + + // Build the final string. This is when lazy segments are evaluated. + public override String ToString() + { + return String.Join( + String.Empty, + m_segments.Select(obj => + { + if (obj is Lazy lazy) + { + return lazy.Value; + } + else + { + return obj as String; + } + })); + } + + // Append a static value + internal void Append(String value) + { + if (value?.Length > 0) + { + // Track memory + m_counter.Add(value); + + // Append the segment + m_segments.Add(value); + } + } + + // Append an argument + internal void Append( + Int32 argIndex, + String formatSpecifiers) + { + // Delay execution until the final ToString + m_segments.Add(new Lazy(() => + { + String result; + + // Get the arg from the cache + var argValue = m_cache[argIndex]; + + // Evaluate the arg and cache the result + if (argValue == null) + { + // The evaluation result is required when format specifiers are used. Otherwise the string + // result is required. Go ahead and store both values. Since ConvertToString produces tracing, + // we need to run that now so the tracing appears in order in the log. + var evaluationResult = m_node.Parameters[argIndex + 1].Evaluate(m_context); + var stringResult = evaluationResult.ConvertToString(); + argValue = new ArgValue(evaluationResult, stringResult); + m_cache[argIndex] = argValue; + } + + // No format specifiers + if (String.IsNullOrEmpty(formatSpecifiers)) + { + result = argValue.StringResult; + } + // Invalid + else + { + throw new FormatException(ExpressionResources.InvalidFormatSpecifiers(formatSpecifiers, argValue.EvaluationResult.Kind)); + } + + // Track memory + if (!String.IsNullOrEmpty(result)) + { + m_counter.Add(result); + } + + return result; + })); + } + + private readonly ArgValue[] m_cache; + private readonly EvaluationContext m_context; + private readonly MemoryCounter m_counter; + private readonly Format m_node; + private readonly List m_segments = new List(); + } + + /// + /// Stores an EvaluateResult and the value converted to a String. 
+ /// + private sealed class ArgValue + { + public ArgValue( + EvaluationResult evaluationResult, + String stringResult) + { + EvaluationResult = evaluationResult; + StringResult = stringResult; + } + + public EvaluationResult EvaluationResult { get; } + + public String StringResult { get; } + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/HashFiles.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/HashFiles.cs new file mode 100644 index 00000000000..f3a9e941aa2 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/HashFiles.cs @@ -0,0 +1,93 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using Minimatch; +using System.IO; +using System.Security.Cryptography; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.Pipelines.ObjectTemplating; +namespace GitHub.DistributedTask.Expressions2.Sdk.Functions +{ + internal sealed class HashFiles : Function + { + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + + // hashFiles() only works on the runner and only works with files under GITHUB_WORKSPACE + // Since GITHUB_WORKSPACE is set by runner, I am using that as the fact of this code runs on server or runner. + if (context.State is ObjectTemplating.TemplateContext templateContext && + templateContext.ExpressionValues.TryGetValue(PipelineTemplateConstants.GitHub, out var githubContextData) && + githubContextData is DictionaryContextData githubContext && + githubContext.TryGetValue(PipelineTemplateConstants.Workspace, out var workspace) == true && + workspace is StringContextData workspaceData) + { + string searchRoot = workspaceData.Value; + string pattern = Parameters[0].Evaluate(context).ConvertToString(); + + context.Trace.Info($"Search root directory: '{searchRoot}'"); + context.Trace.Info($"Search pattern: '{pattern}'"); + var files = Directory.GetFiles(searchRoot, "*", SearchOption.AllDirectories).OrderBy(x => x).ToList(); + if (files.Count == 0) + { + throw new ArgumentException($"'hashFiles({pattern})' failed. Directory '{searchRoot}' is empty"); + } + else + { + context.Trace.Info($"Found {files.Count} files"); + } + + var matcher = new Minimatcher(pattern, s_minimatchOptions); + files = matcher.Filter(files).ToList(); + if (files.Count == 0) + { + throw new ArgumentException($"'hashFiles({pattern})' failed. 
Search pattern '{pattern}' doesn't match any file under '{searchRoot}'"); + } + else + { + context.Trace.Info($"{files.Count} matches to hash"); + } + + List filesSha256 = new List(); + foreach (var file in files) + { + context.Trace.Info($"Hash {file}"); + using (SHA256 sha256hash = SHA256.Create()) + { + using (var fileStream = File.OpenRead(file)) + { + filesSha256.AddRange(sha256hash.ComputeHash(fileStream)); + } + } + } + + using (SHA256 sha256hash = SHA256.Create()) + { + var hashBytes = sha256hash.ComputeHash(filesSha256.ToArray()); + StringBuilder hashString = new StringBuilder(); + for (int i = 0; i < hashBytes.Length; i++) + { + hashString.Append(hashBytes[i].ToString("x2")); + } + var result = hashString.ToString(); + context.Trace.Info($"Final hash result: '{result}'"); + return result; + } + } + else + { + throw new InvalidOperationException("'hashfiles' expression function is only supported under runner context."); + } + } + + private static readonly Options s_minimatchOptions = new Options + { + Dot = true, + NoBrace = true, + NoCase = Environment.OSVersion.Platform != PlatformID.Unix && Environment.OSVersion.Platform != PlatformID.MacOSX + }; + } +} \ No newline at end of file diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/Join.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/Join.cs new file mode 100644 index 00000000000..f3d9c3b9ceb --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/Join.cs @@ -0,0 +1,74 @@ +using System; +using System.Text; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Functions +{ + internal sealed class Join : Function + { + protected sealed override Boolean TraceFullyRealized => true; + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var items = Parameters[0].Evaluate(context); + + // Array + if (items.TryGetCollectionInterface(out var collection) && + collection is IReadOnlyArray array && + array.Count > 0) + { + var result = new StringBuilder(); + var memory = new MemoryCounter(this, context.Options.MaxMemory); + + // Append the first item + var item = array[0]; + var itemResult = EvaluationResult.CreateIntermediateResult(context, item); + var itemString = itemResult.ConvertToString(); + memory.Add(itemString); + result.Append(itemString); + + // More items? 
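+ // For illustration (hypothetical values): given an array value ['a','b','c'], join(value)
+ // would produce "a,b,c" using the default "," separator below, while join(value, ', ')
+ // would produce "a, b, c"; a primitive argument is returned as its string form, and any
+ // other value falls through to the empty string.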
+ if (array.Count > 1) + { + var separator = ","; + if (Parameters.Count > 1) + { + var separatorResult = Parameters[1].Evaluate(context); + if (separatorResult.IsPrimitive) + { + separator = separatorResult.ConvertToString(); + } + } + + for (var i = 1; i < array.Count; i++) + { + // Append the separator + memory.Add(separator); + result.Append(separator); + + // Append the next item + var nextItem = array[i]; + var nextItemResult = EvaluationResult.CreateIntermediateResult(context, nextItem); + var nextItemString = nextItemResult.ConvertToString(); + memory.Add(nextItemString); + result.Append(nextItemString); + } + } + + return result.ToString(); + } + // Primitive + else if (items.IsPrimitive) + { + return items.ConvertToString(); + } + // Otherwise return empty string + else + { + return String.Empty; + } + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/NoOperation.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/NoOperation.cs new file mode 100644 index 00000000000..e10386786e8 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/NoOperation.cs @@ -0,0 +1,22 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Functions +{ + /// + /// Used for building expression parse trees. + /// + internal sealed class NoOperation : Function + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + return null; + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/StartsWith.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/StartsWith.cs new file mode 100644 index 00000000000..80ea80344af --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/StartsWith.cs @@ -0,0 +1,30 @@ +using System; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Functions +{ + internal sealed class StartsWith : Function + { + protected sealed override Boolean TraceFullyRealized => false; + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var left = Parameters[0].Evaluate(context); + if (left.IsPrimitive) + { + var leftString = left.ConvertToString(); + + var right = Parameters[1].Evaluate(context); + if (right.IsPrimitive) + { + var rightString = right.ConvertToString(); + return leftString.StartsWith(rightString, StringComparison.OrdinalIgnoreCase); + } + } + + return false; + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/ToJson.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/ToJson.cs new file mode 100644 index 00000000000..268466be3bc --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Functions/ToJson.cs @@ -0,0 +1,390 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Text; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Functions +{ + internal sealed class ToJson : Function + { + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var result = new StringBuilder(); + var memory = new MemoryCounter(this, context.Options.MaxMemory); + var current = Parameters[0].Evaluate(context); + var ancestors = new Stack(); + + do + { + // Descend as much as possible + while (true) + { + // Collection + if (current.TryGetCollectionInterface(out Object 
collection)) + { + // Array + if (collection is IReadOnlyArray array) + { + if (array.Count > 0) + { + // Write array start + WriteArrayStart(result, memory, ancestors); + + // Move to first item + var enumerator = new ArrayEnumerator(context, current, array); + enumerator.MoveNext(); + ancestors.Push(enumerator); + current = enumerator.Current; + } + else + { + // Write empty array + WriteEmptyArray(result, memory, ancestors); + break; + } + } + // Mapping + else if (collection is IReadOnlyObject obj) + { + if (obj.Count > 0) + { + // Write mapping start + WriteMappingStart(result, memory, ancestors); + + // Move to first pair + var enumerator = new ObjectEnumerator(context, current, obj); + enumerator.MoveNext(); + ancestors.Push(enumerator); + + // Write mapping key + WriteMappingKey(context, result, memory, enumerator.Current.Key, ancestors); + + // Move to mapping value + current = enumerator.Current.Value; + } + else + { + // Write empty mapping + WriteEmptyMapping(result, memory, ancestors); + break; + } + } + else + { + throw new NotSupportedException($"Unexpected type '{collection?.GetType().FullName}'"); + } + } + // Not a collection + else + { + // Write value + WriteValue(context, result, memory, current, ancestors); + break; + } + } + + // Next sibling or ancestor sibling + do + { + if (ancestors.Count > 0) + { + var parent = ancestors.Peek(); + + // Parent array + if (parent is ArrayEnumerator arrayEnumerator) + { + // Move to next item + if (arrayEnumerator.MoveNext()) + { + current = arrayEnumerator.Current; + + break; + } + // Move to parent + else + { + ancestors.Pop(); + current = arrayEnumerator.Array; + + // Write array end + WriteArrayEnd(result, memory, ancestors); + } + } + // Parent mapping + else if (parent is ObjectEnumerator objectEnumerator) + { + // Move to next pair + if (objectEnumerator.MoveNext()) + { + // Write mapping key + WriteMappingKey(context, result, memory, objectEnumerator.Current.Key, ancestors); + + // Move to mapping value + current = objectEnumerator.Current.Value; + + break; + } + // Move to parent + else + { + ancestors.Pop(); + current = objectEnumerator.Object; + + // Write mapping end + WriteMappingEnd(result, memory, ancestors); + } + } + else + { + throw new NotSupportedException($"Unexpected type '{parent?.GetType().FullName}'"); + } + } + else + { + current = null; + } + + } while (current != null); + + } while (current != null); + + return result.ToString(); + } + + private void WriteArrayStart( + StringBuilder writer, + MemoryCounter memory, + Stack ancestors) + { + var str = PrefixValue("[", ancestors); + memory.Add(str); + writer.Append(str); + } + + private void WriteMappingStart( + StringBuilder writer, + MemoryCounter memory, + Stack ancestors) + { + var str = PrefixValue("{", ancestors); + memory.Add(str); + writer.Append(str); + } + + private void WriteArrayEnd( + StringBuilder writer, + MemoryCounter memory, + Stack ancestors) + { + var str = $"\n{new String(' ', ancestors.Count * 2)}]"; + memory.Add(str); + writer.Append(str); + } + + private void WriteMappingEnd( + StringBuilder writer, + MemoryCounter memory, + Stack ancestors) + { + var str = $"\n{new String(' ', ancestors.Count * 2)}}}"; + memory.Add(str); + writer.Append(str); + } + + private void WriteEmptyArray( + StringBuilder writer, + MemoryCounter memory, + Stack ancestors) + { + var str = PrefixValue("[]", ancestors); + memory.Add(str); + writer.Append(str); + } + + private void WriteEmptyMapping( + StringBuilder writer, + MemoryCounter memory, + Stack 
ancestors) + { + var str = PrefixValue("{}", ancestors); + memory.Add(str); + writer.Append(str); + } + + private void WriteMappingKey( + EvaluationContext context, + StringBuilder writer, + MemoryCounter memory, + EvaluationResult key, + Stack ancestors) + { + var str = PrefixValue(JsonUtility.ToString(key.ConvertToString()), ancestors, isMappingKey: true); + memory.Add(str); + writer.Append(str); + } + + private void WriteValue( + EvaluationContext context, + StringBuilder writer, + MemoryCounter memory, + EvaluationResult value, + Stack ancestors) + { + String str; + switch (value.Kind) + { + case ValueKind.Null: + str = "null"; + break; + + case ValueKind.Boolean: + str = (Boolean)value.Value ? "true" : "false"; + break; + + case ValueKind.Number: + str = value.ConvertToString(); + break; + + case ValueKind.String: + str = JsonUtility.ToString(value.Value); + break; + + default: + str = "{}"; // The value is an object we don't know how to traverse + break; + } + + str = PrefixValue(str, ancestors); + memory.Add(str); + writer.Append(str); + } + + private String PrefixValue( + String value, + Stack ancestors, + Boolean isMappingKey = false) + { + var level = ancestors.Count; + var parent = level > 0 ? ancestors.Peek() : null; + + if (!isMappingKey && parent is ObjectEnumerator) + { + return $": {value}"; + } + else if (level > 0) + { + return $"{(parent.IsFirst ? String.Empty : ",")}\n{new String(' ', level * 2)}{value}"; + } + else + { + return value; + } + } + + private interface ICollectionEnumerator : IEnumerator + { + Boolean IsFirst { get; } + } + + private sealed class ArrayEnumerator : ICollectionEnumerator + { + public ArrayEnumerator( + EvaluationContext context, + EvaluationResult result, + IReadOnlyArray array) + { + m_context = context; + m_result = result; + m_enumerator = array.GetEnumerator(); + } + + public EvaluationResult Array => m_result; + + public EvaluationResult Current => m_current; + + Object IEnumerator.Current => m_current; + + public Boolean IsFirst => m_index == 0; + + public Boolean MoveNext() + { + if (m_enumerator.MoveNext()) + { + m_current = EvaluationResult.CreateIntermediateResult(m_context, m_enumerator.Current); + m_index++; + return true; + } + else + { + m_current = null; + return false; + } + } + + public void Reset() + { + throw new NotSupportedException(nameof(Reset)); + } + + private readonly EvaluationContext m_context; + private readonly IEnumerator m_enumerator; + private readonly EvaluationResult m_result; + private EvaluationResult m_current; + private Int32 m_index = -1; + } + + private sealed class ObjectEnumerator : ICollectionEnumerator + { + public ObjectEnumerator( + EvaluationContext context, + EvaluationResult result, + IReadOnlyObject obj) + { + m_context = context; + m_result = result; + m_enumerator = obj.GetEnumerator(); + } + + public KeyValuePair Current => m_current; + + Object IEnumerator.Current => m_current; + + public Boolean IsFirst => m_index == 0; + + public EvaluationResult Object => m_result; + + public Boolean MoveNext() + { + if (m_enumerator.MoveNext()) + { + var current = (KeyValuePair)m_enumerator.Current; + var key = EvaluationResult.CreateIntermediateResult(m_context, current.Key); + var value = EvaluationResult.CreateIntermediateResult(m_context, current.Value); + m_current = new KeyValuePair(key, value); + m_index++; + return true; + } + else + { + m_current = default(KeyValuePair); + return false; + } + } + + public void Reset() + { + throw new NotSupportedException(nameof(Reset)); + } + + private 
readonly EvaluationContext m_context; + private readonly IEnumerator m_enumerator; + private readonly EvaluationResult m_result; + private KeyValuePair m_current; + private Int32 m_index = -1; + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/IBoolean.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/IBoolean.cs new file mode 100644 index 00000000000..b5b45cbf7d6 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/IBoolean.cs @@ -0,0 +1,11 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IBoolean + { + Boolean GetBoolean(); + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/INull.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/INull.cs new file mode 100644 index 00000000000..9d86f70c901 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/INull.cs @@ -0,0 +1,9 @@ +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface INull + { + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/INumber.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/INumber.cs new file mode 100644 index 00000000000..4b2e61dd002 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/INumber.cs @@ -0,0 +1,11 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface INumber + { + Double GetNumber(); + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/IReadOnlyArray.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/IReadOnlyArray.cs new file mode 100644 index 00000000000..1d3ef5129f4 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/IReadOnlyArray.cs @@ -0,0 +1,16 @@ +using System; +using System.Collections; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IReadOnlyArray + { + Int32 Count { get; } + + Object this[Int32 index] { get; } + + IEnumerator GetEnumerator(); + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/IReadOnlyObject.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/IReadOnlyObject.cs new file mode 100644 index 00000000000..aa78b01e873 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/IReadOnlyObject.cs @@ -0,0 +1,27 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IReadOnlyObject + { + Int32 Count { get; } + + IEnumerable Keys { get; } + + IEnumerable Values { get; } + + Object this[String key] { get; } + + Boolean ContainsKey(String key); + + IEnumerator GetEnumerator(); + + Boolean TryGetValue( + String key, + out Object value); + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/IString.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/IString.cs new file mode 100644 index 00000000000..bcc5c8da3ef --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/IString.cs @@ -0,0 +1,11 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IString + { + String GetString(); + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Literal.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Literal.cs new file mode 100644 index 
00000000000..75e65f62f0b --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Literal.cs @@ -0,0 +1,43 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class Literal : ExpressionNode + { + public Literal(Object val) + { + Value = ExpressionUtility.ConvertToCanonicalValue(val, out var kind, out _); + Kind = kind; + Name = kind.ToString(); + } + + public ValueKind Kind { get; } + + public Object Value { get; } + + // Prevent the value from being stored on the evaluation context. + // This avoids unnecessarily duplicating the value in memory. + protected sealed override Boolean TraceFullyRealized => false; + + internal sealed override String ConvertToExpression() + { + return ExpressionUtility.FormatValue(null, Value, Kind); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + return ExpressionUtility.FormatValue(null, Value, Kind); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + return Value; + } + } + +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/MemoryCounter.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/MemoryCounter.cs new file mode 100644 index 00000000000..cba66e30422 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/MemoryCounter.cs @@ -0,0 +1,94 @@ +using System; +using System.ComponentModel; +using Newtonsoft.Json.Linq; +using ExpressionResources = GitHub.DistributedTask.Expressions.ExpressionResources; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + /// + /// Helper class for ExpressionNode authors. This class helps calculate memory overhead for a result object. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class MemoryCounter + { + internal MemoryCounter( + ExpressionNode node, + Int32? maxBytes) + { + m_node = node; + m_maxBytes = (maxBytes ?? 0) > 0 ? maxBytes.Value : Int32.MaxValue; + } + + public Int32 CurrentBytes => m_currentBytes; + + public void Add(Int32 amount) + { + if (!TryAdd(amount)) + { + throw new InvalidOperationException(ExpressionResources.ExceededAllowedMemory(m_node?.ConvertToExpression())); + } + } + + public void Add(String value) + { + Add(CalculateSize(value)); + } + + public void AddMinObjectSize() + { + Add(MinObjectSize); + } + + public void Remove(String value) + { + m_currentBytes -= CalculateSize(value); + } + + public static Int32 CalculateSize(String value) + { + // This measurement doesn't have to be perfect. + // https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/ + + Int32 bytes; + checked + { + bytes = StringBaseOverhead + ((value?.Length ??
0) * 2); + } + return bytes; + } + + internal Boolean TryAdd(Int32 amount) + { + try + { + checked + { + amount += m_currentBytes; + } + + if (amount > m_maxBytes) + { + return false; + } + + m_currentBytes = amount; + return true; + } + catch (OverflowException) + { + return false; + } + } + + internal Boolean TryAdd(String value) + { + return TryAdd(CalculateSize(value)); + } + + internal const Int32 MinObjectSize = 24; + internal const Int32 StringBaseOverhead = 26; + private readonly Int32 m_maxBytes; + private readonly ExpressionNode m_node; + private Int32 m_currentBytes; + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/NamedValue.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/NamedValue.cs new file mode 100644 index 00000000000..8ec3c42f65a --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/NamedValue.cs @@ -0,0 +1,24 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class NamedValue : ExpressionNode + { + internal sealed override string ConvertToExpression() => Name; + + protected sealed override Boolean TraceFullyRealized => true; + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return Name; + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/NoOperationNamedValue.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/NoOperationNamedValue.cs new file mode 100644 index 00000000000..c53ab031997 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/NoOperationNamedValue.cs @@ -0,0 +1,19 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + internal sealed class NoOperationNamedValue : NamedValue + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + return null; + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/And.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/And.cs new file mode 100644 index 00000000000..e8339cee262 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/And.cs @@ -0,0 +1,51 @@ +using System; +using System.Globalization; +using System.Linq; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Operators +{ + internal sealed class And : Container + { + protected sealed override Boolean TraceFullyRealized => false; + + internal sealed override String ConvertToExpression() + { + return String.Format( + CultureInfo.InvariantCulture, + "({0})", + String.Join(" && ", Parameters.Select(x => x.ConvertToExpression()))); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return String.Format( + CultureInfo.InvariantCulture, + "({0})", + String.Join(" && ", Parameters.Select(x => x.ConvertToRealizedExpression(context)))); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var result = default(EvaluationResult); + foreach (var parameter in Parameters) + { + result = parameter.Evaluate(context); + if (result.IsFalsy) + { + return result.Value; + } + } + + 
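+ // Like a JavaScript &&, the loop above stops at the first falsy operand and returns that
+ // operand's raw value; when every operand is truthy, the last evaluated value is returned
+ // here (illustrative: and(true, 'abc') yields 'abc', while and('', 'abc') yields '').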
return result?.Value; + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Equal.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Equal.cs new file mode 100644 index 00000000000..7ad1e5b7267 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Equal.cs @@ -0,0 +1,44 @@ +using System; +using System.Globalization; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Operators +{ + internal sealed class Equal : Container + { + protected sealed override Boolean TraceFullyRealized => false; + + internal sealed override String ConvertToExpression() + { + return String.Format( + CultureInfo.InvariantCulture, + "({0} == {1})", + Parameters[0].ConvertToExpression(), + Parameters[1].ConvertToExpression()); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return String.Format( + CultureInfo.InvariantCulture, + "({0} == {1})", + Parameters[0].ConvertToRealizedExpression(context), + Parameters[1].ConvertToRealizedExpression(context)); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var left = Parameters[0].Evaluate(context); + var right = Parameters[1].Evaluate(context); + return left.AbstractEqual(right); + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/GreaterThan.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/GreaterThan.cs new file mode 100644 index 00000000000..8c3ee5f80bb --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/GreaterThan.cs @@ -0,0 +1,44 @@ +using System; +using System.Globalization; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Operators +{ + internal sealed class GreaterThan : Container + { + protected sealed override Boolean TraceFullyRealized => false; + + internal sealed override String ConvertToExpression() + { + return String.Format( + CultureInfo.InvariantCulture, + "({0} > {1})", + Parameters[0].ConvertToExpression(), + Parameters[1].ConvertToExpression()); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return String.Format( + CultureInfo.InvariantCulture, + "({0} > {1})", + Parameters[0].ConvertToRealizedExpression(context), + Parameters[1].ConvertToRealizedExpression(context)); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var left = Parameters[0].Evaluate(context); + var right = Parameters[1].Evaluate(context); + return left.AbstractGreaterThan(right); + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/GreaterThanOrEqual.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/GreaterThanOrEqual.cs new file mode 100644 index 00000000000..93e984a5367 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/GreaterThanOrEqual.cs @@ -0,0 +1,44 @@ +using System; +using System.Globalization; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Operators +{ + internal sealed class GreaterThanOrEqual : Container + { + protected sealed override Boolean TraceFullyRealized => false; + + internal sealed override String ConvertToExpression() + { + return String.Format( + CultureInfo.InvariantCulture, + "({0} >= 
{1})", + Parameters[0].ConvertToExpression(), + Parameters[1].ConvertToExpression()); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return String.Format( + CultureInfo.InvariantCulture, + "({0} >= {1})", + Parameters[0].ConvertToRealizedExpression(context), + Parameters[1].ConvertToRealizedExpression(context)); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var left = Parameters[0].Evaluate(context); + var right = Parameters[1].Evaluate(context); + return left.AbstractGreaterThanOrEqual(right); + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Index.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Index.cs new file mode 100644 index 00000000000..d151818d5cd --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Index.cs @@ -0,0 +1,286 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Globalization; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Operators +{ + internal sealed class Index : Container + { + protected sealed override Boolean TraceFullyRealized => true; + + internal sealed override String ConvertToExpression() + { + // Verify if we can simplify the expression, we would rather return + // github.sha then github['sha'] so we check if this is a simple case. + if (Parameters[1] is Literal literal && + literal.Value is String literalString && + ExpressionUtility.IsLegalKeyword(literalString)) + { + return String.Format( + CultureInfo.InvariantCulture, + "{0}.{1}", + Parameters[0].ConvertToExpression(), + literalString); + } + else + { + return String.Format( + CultureInfo.InvariantCulture, + "{0}[{1}]", + Parameters[0].ConvertToExpression(), + Parameters[1].ConvertToExpression()); + } + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return String.Format( + CultureInfo.InvariantCulture, + "{0}[{1}]", + Parameters[0].ConvertToRealizedExpression(context), + Parameters[1].ConvertToRealizedExpression(context)); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + var left = Parameters[0].Evaluate(context); + + // Not a collection + if (!left.TryGetCollectionInterface(out Object collection)) + { + resultMemory = null; + return Parameters[1] is Wildcard ? 
new FilteredArray() : null; + } + // Filtered array + else if (collection is FilteredArray filteredArray) + { + return HandleFilteredArray(context, filteredArray, out resultMemory); + } + // Object + else if (collection is IReadOnlyObject obj) + { + return HandleObject(context, obj, out resultMemory); + } + // Array + else if (collection is IReadOnlyArray array) + { + return HandleArray(context, array, out resultMemory); + } + + resultMemory = null; + return null; + } + + private Object HandleFilteredArray( + EvaluationContext context, + FilteredArray filteredArray, + out ResultMemory resultMemory) + { + var result = new FilteredArray(); + var counter = new MemoryCounter(this, context.Options.MaxMemory); + + var index = new IndexHelper(context, Parameters[1]); + + foreach (var item in filteredArray) + { + // Leverage the expression SDK to traverse the object + var itemResult = EvaluationResult.CreateIntermediateResult(context, item); + if (itemResult.TryGetCollectionInterface(out var nestedCollection)) + { + // Apply the index to each child object + if (nestedCollection is IReadOnlyObject nestedObject) + { + // Wildcard + if (index.IsWildcard) + { + foreach (var val in nestedObject.Values) + { + result.Add(val); + counter.Add(IntPtr.Size); + } + } + // String + else if (index.HasStringIndex) + { + if (nestedObject.TryGetValue(index.StringIndex, out var nestedObjectValue)) + { + result.Add(nestedObjectValue); + counter.Add(IntPtr.Size); + } + } + } + // Apply the index to each child array + else if (nestedCollection is IReadOnlyArray nestedArray) + { + // Wildcard + if (index.IsWildcard) + { + foreach (var val in nestedArray) + { + result.Add(val); + counter.Add(IntPtr.Size); + } + } + // String + else if (index.HasIntegerIndex && + index.IntegerIndex < nestedArray.Count) + { + result.Add(nestedArray[index.IntegerIndex]); + counter.Add(IntPtr.Size); + } + } + } + } + + resultMemory = new ResultMemory { Bytes = counter.CurrentBytes }; + return result; + } + + private Object HandleObject( + EvaluationContext context, + IReadOnlyObject obj, + out ResultMemory resultMemory) + { + var index = new IndexHelper(context, Parameters[1]); + + // Wildcard + if (index.IsWildcard) + { + var filteredArray = new FilteredArray(); + var counter = new MemoryCounter(this, context.Options.MaxMemory); + counter.AddMinObjectSize(); + + foreach (var val in obj.Values) + { + filteredArray.Add(val); + counter.Add(IntPtr.Size); + } + + resultMemory = new ResultMemory { Bytes = counter.CurrentBytes }; + return filteredArray; + } + // String + else if (index.HasStringIndex && + obj.TryGetValue(index.StringIndex, out var result)) + { + resultMemory = null; + return result; + } + + resultMemory = null; + return null; + } + + private Object HandleArray( + EvaluationContext context, + IReadOnlyArray array, + out ResultMemory resultMemory) + { + var index = new IndexHelper(context, Parameters[1]); + + // Wildcard + if (index.IsWildcard) + { + var filtered = new FilteredArray(); + var counter = new MemoryCounter(this, context.Options.MaxMemory); + counter.AddMinObjectSize(); + + foreach (var item in array) + { + filtered.Add(item); + counter.Add(IntPtr.Size); + } + + resultMemory = new ResultMemory { Bytes = counter.CurrentBytes }; + return filtered; + } + // Integer + else if (index.HasIntegerIndex && index.IntegerIndex < array.Count) + { + resultMemory = null; + return array[index.IntegerIndex]; + } + + resultMemory = null; + return null; + } + + private class FilteredArray : IReadOnlyArray + { + public FilteredArray() + { 
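+ // Backing list for the values a wildcard index has selected so far; HandleFilteredArray
+ // above then applies the next index to each element (illustrative: commits.*.message
+ // gathers every commit's message into one FilteredArray).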
+ m_list = new List(); + } + + public void Add(Object o) + { + m_list.Add(o); + } + + public Int32 Count => m_list.Count; + + public Object this[Int32 index] => m_list[index]; + + public IEnumerator GetEnumerator() => m_list.GetEnumerator(); + + private readonly IList m_list; + } + + private class IndexHelper + { + public IndexHelper( + EvaluationContext context, + ExpressionNode parameter) + { + m_parameter = parameter; + m_result = parameter.Evaluate(context); + + m_integerIndex = new Lazy(() => + { + var doubleIndex = m_result.ConvertToNumber(); + if (Double.IsNaN(doubleIndex) || doubleIndex < 0d) + { + return null; + } + + doubleIndex = Math.Floor(doubleIndex); + if (doubleIndex > (Double)Int32.MaxValue) + { + return null; + } + + return (Int32)doubleIndex; + }); + + m_stringIndex = new Lazy(() => + { + return m_result.IsPrimitive ? m_result.ConvertToString() : null; + }); + } + + public Boolean HasIntegerIndex => m_integerIndex.Value != null; + + public Boolean HasStringIndex => m_stringIndex.Value != null; + + public Boolean IsWildcard => m_parameter is Wildcard; + + public Int32 IntegerIndex => m_integerIndex.Value ?? default(Int32); + + public String StringIndex => m_stringIndex.Value; + + private readonly ExpressionNode m_parameter; + private readonly EvaluationResult m_result; + private readonly Lazy m_integerIndex; + private readonly Lazy m_stringIndex; + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/LessThan.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/LessThan.cs new file mode 100644 index 00000000000..29c7acb5e0f --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/LessThan.cs @@ -0,0 +1,44 @@ +using System; +using System.Globalization; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Operators +{ + internal sealed class LessThan : Container + { + protected sealed override Boolean TraceFullyRealized => false; + + internal sealed override String ConvertToExpression() + { + return String.Format( + CultureInfo.InvariantCulture, + "({0} < {1})", + Parameters[0].ConvertToExpression(), + Parameters[1].ConvertToExpression()); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return String.Format( + CultureInfo.InvariantCulture, + "({0} < {1})", + Parameters[0].ConvertToRealizedExpression(context), + Parameters[1].ConvertToRealizedExpression(context)); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var left = Parameters[0].Evaluate(context); + var right = Parameters[1].Evaluate(context); + return left.AbstractLessThan(right); + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/LessThanOrEqual.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/LessThanOrEqual.cs new file mode 100644 index 00000000000..d19391eb38e --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/LessThanOrEqual.cs @@ -0,0 +1,44 @@ +using System; +using System.Globalization; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Operators +{ + internal sealed class LessThanOrEqual : Container + { + protected sealed override Boolean TraceFullyRealized => false; + + internal sealed override String ConvertToExpression() + { + return String.Format( + CultureInfo.InvariantCulture, + "({0} <= {1})", + Parameters[0].ConvertToExpression(), + 
Parameters[1].ConvertToExpression()); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return String.Format( + CultureInfo.InvariantCulture, + "({0} <= {1})", + Parameters[0].ConvertToRealizedExpression(context), + Parameters[1].ConvertToRealizedExpression(context)); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var left = Parameters[0].Evaluate(context); + var right = Parameters[1].Evaluate(context); + return left.AbstractLessThanOrEqual(right); + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Not.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Not.cs new file mode 100644 index 00000000000..22ec01d75d6 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Not.cs @@ -0,0 +1,41 @@ +using System; +using System.Globalization; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Operators +{ + internal sealed class Not : Container + { + protected sealed override Boolean TraceFullyRealized => false; + + internal sealed override String ConvertToExpression() + { + return String.Format( + CultureInfo.InvariantCulture, + "!{0}", + Parameters[0].ConvertToExpression()); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return String.Format( + CultureInfo.InvariantCulture, + "!{0}", + Parameters[0].ConvertToRealizedExpression(context)); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var result = Parameters[0].Evaluate(context); + return result.IsFalsy; + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/NotEqual.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/NotEqual.cs new file mode 100644 index 00000000000..d96ba3fdb2b --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/NotEqual.cs @@ -0,0 +1,44 @@ +using System; +using System.Globalization; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Operators +{ + internal sealed class NotEqual : Container + { + protected sealed override Boolean TraceFullyRealized => false; + + internal sealed override String ConvertToExpression() + { + return String.Format( + CultureInfo.InvariantCulture, + "({0} != {1})", + Parameters[0].ConvertToExpression(), + Parameters[1].ConvertToExpression()); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return String.Format( + CultureInfo.InvariantCulture, + "({0} != {1})", + Parameters[0].ConvertToRealizedExpression(context), + Parameters[1].ConvertToRealizedExpression(context)); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var left = Parameters[0].Evaluate(context); + var right = Parameters[1].Evaluate(context); + return left.AbstractNotEqual(right); + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Or.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Or.cs new file mode 100644 index 
00000000000..5f051d9d7db --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Operators/Or.cs @@ -0,0 +1,51 @@ +using System; +using System.Globalization; +using System.Linq; + +namespace GitHub.DistributedTask.Expressions2.Sdk.Operators +{ + internal sealed class Or : Container + { + protected sealed override Boolean TraceFullyRealized => false; + + internal sealed override String ConvertToExpression() + { + return String.Format( + CultureInfo.InvariantCulture, + "({0})", + String.Join(" || ", Parameters.Select(x => x.ConvertToExpression()))); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + // Check if the result was stored + if (context.TryGetTraceResult(this, out String result)) + { + return result; + } + + return String.Format( + CultureInfo.InvariantCulture, + "({0})", + String.Join(" || ", Parameters.Select(x => x.ConvertToRealizedExpression(context)))); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var result = default(EvaluationResult); + foreach (var parameter in Parameters) + { + result = parameter.Evaluate(context); + if (result.IsTruthy) + { + break; + } + } + + return result?.Value; + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/ResultMemory.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/ResultMemory.cs new file mode 100644 index 00000000000..b5524c5b730 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/ResultMemory.cs @@ -0,0 +1,58 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class ResultMemory + { + /// + /// Only set a non-null value when both of the following conditions are met: + /// 1) The result is a complex object. In other words, the result is + /// not a simple type: string, boolean, number, or null. + /// 2) The result is a newly created object. + /// + /// + /// For example, consider a function jsonParse() which takes a string parameter, + /// and returns a JToken object. The JToken object is newly created and a rough + /// measurement should be returned for the number of bytes it consumes in memory. + /// + /// + /// + /// For another example, consider a function which returns a sub-object from a + /// complex parameter value. From the perspective of an individual function, + /// the size of the complex parameter value is unknown. In this situation, set the + /// value to IntPtr.Size. + /// + /// + /// + /// When you are unsure, set the value to null. Null indicates the overhead of a + /// new pointer should be accounted for. + /// + /// + public Int32? Bytes { get; set; } + + /// + /// Indicates whether represents the total size of the result. + /// True indicates the accounting-overhead of downstream parameters can be discarded. + /// + /// For , this value is currently ignored. + /// + /// + /// For example, consider a funciton jsonParse() which takes a string paramter, + /// and returns a JToken object. The JToken object is newly created and a rough + /// measurement should be returned for the amount of bytes it consumes in memory. + /// Set the to true, since new object contains no references + /// to previously allocated memory. + /// + /// + /// + /// For another example, consider a function which wraps a complex parameter result. + /// should be set to the amount of newly allocated memory. 
+ /// However since the object references previously allocated memory, set + /// to false. + /// + /// + public Boolean IsTotal { get; set; } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Sdk/Wildcard.cs b/src/Sdk/DTExpressions2/Expressions2/Sdk/Wildcard.cs new file mode 100644 index 00000000000..af11c58ea07 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Sdk/Wildcard.cs @@ -0,0 +1,32 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2.Sdk +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class Wildcard : ExpressionNode + { + // Prevent the value from being stored on the evaluation context. + // This avoids unneccessarily duplicating the value in memory. + protected sealed override Boolean TraceFullyRealized => false; + + internal sealed override String ConvertToExpression() + { + return ExpressionConstants.Wildcard.ToString(); + } + + internal sealed override String ConvertToRealizedExpression(EvaluationContext context) + { + return ExpressionConstants.Wildcard.ToString(); + } + + protected sealed override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + return ExpressionConstants.Wildcard.ToString(); + } + } + +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Tokens/Associativity.cs b/src/Sdk/DTExpressions2/Expressions2/Tokens/Associativity.cs new file mode 100644 index 00000000000..8b1c6e7bfb2 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Tokens/Associativity.cs @@ -0,0 +1,9 @@ +namespace GitHub.DistributedTask.Expressions2.Tokens +{ + internal enum Associativity + { + None, + LeftToRight, + RightToLeft, + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Tokens/LexicalAnalyzer.cs b/src/Sdk/DTExpressions2/Expressions2/Tokens/LexicalAnalyzer.cs new file mode 100644 index 00000000000..7ad1d032f2a --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Tokens/LexicalAnalyzer.cs @@ -0,0 +1,491 @@ +using GitHub.DistributedTask.Expressions2.Sdk; +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Text; + +namespace GitHub.DistributedTask.Expressions2.Tokens +{ + internal sealed class LexicalAnalyzer + { + public LexicalAnalyzer(String expression) + { + m_expression = expression; + } + + public IEnumerable UnclosedTokens => m_unclosedTokens; + + public Boolean TryGetNextToken(ref Token token) + { + // Skip whitespace + while (m_index < m_expression.Length && Char.IsWhiteSpace(m_expression[m_index])) + { + m_index++; + } + + // Test end of string + if (m_index >= m_expression.Length) + { + token = null; + return false; + } + + // Read the first character to determine the type of token. 
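+ // For illustration (hypothetical input): "github.event_name == 'push'" is read as the
+ // token stream NamedValue "github", Dereference ".", PropertyName "event_name",
+ // LogicalOperator "==", String "push".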
+ var c = m_expression[m_index]; + switch (c) + { + case ExpressionConstants.StartGroup: // "(" + // Function call + if (m_lastToken?.Kind == TokenKind.Function) + { + token = CreateToken(TokenKind.StartParameters, c, m_index++); + } + // Logical grouping + else + { + token = CreateToken(TokenKind.StartGroup, c, m_index++); + } + break; + case ExpressionConstants.StartIndex: // "[" + token = CreateToken(TokenKind.StartIndex, c, m_index++); + break; + case ExpressionConstants.EndGroup: // ")" + // Function call + if (m_unclosedTokens.FirstOrDefault()?.Kind == TokenKind.StartParameters) // "(" function call + { + token = CreateToken(TokenKind.EndParameters, c, m_index++); + } + // Logical grouping + else + { + token = CreateToken(TokenKind.EndGroup, c, m_index++); + } + break; + case ExpressionConstants.EndIndex: // "]" + token = CreateToken(TokenKind.EndIndex, c, m_index++); + break; + case ExpressionConstants.Separator: // "," + token = CreateToken(TokenKind.Separator, c, m_index++); + break; + case ExpressionConstants.Wildcard: // "*" + token = CreateToken(TokenKind.Wildcard, c, m_index++); + break; + case '\'': + token = ReadStringToken(); + break; + case '!': // "!" and "!=" + case '>': // ">" and ">=" + case '<': // "<" and "<=" + case '=': // "==" + case '&': // "&&" + case '|': // "||" + token = ReadOperator(); + break; + default: + if (c == '.') + { + // Number + if (m_lastToken == null || + m_lastToken.Kind == TokenKind.Separator || // "," + m_lastToken.Kind == TokenKind.StartGroup || // "(" logical grouping + m_lastToken.Kind == TokenKind.StartIndex || // "[" + m_lastToken.Kind == TokenKind.StartParameters || // "(" function call + m_lastToken.Kind == TokenKind.LogicalOperator) // "!", "==", etc + { + token = ReadNumberToken(); + } + // "." + else + { + token = CreateToken(TokenKind.Dereference, c, m_index++); + } + } + else if (c == '-' || c == '+' || (c >= '0' && c <= '9')) + { + token = ReadNumberToken(); + } + else + { + token = ReadKeywordToken(); + } + + break; + } + + m_lastToken = token; + return true; + } + + private Token ReadNumberToken() + { + var startIndex = m_index; + do + { + m_index++; + } + while (m_index < m_expression.Length && (!TestTokenBoundary(m_expression[m_index]) || m_expression[m_index] == '.')); + + var length = m_index - startIndex; + var str = m_expression.Substring(startIndex, length); + var d = ExpressionUtility.ParseNumber(str); + + if (Double.IsNaN(d)) + { + return CreateToken(TokenKind.Unexpected, str, startIndex); + } + + return CreateToken(TokenKind.Number, str, startIndex, d); + } + + private Token ReadKeywordToken() + { + // Read to the end of the keyword. + var startIndex = m_index; + m_index++; // Skip the first char. It is already known to be the start of the keyword. + while (m_index < m_expression.Length && !TestTokenBoundary(m_expression[m_index])) + { + m_index++; + } + + // Test if valid keyword character sequence. + var length = m_index - startIndex; + var str = m_expression.Substring(startIndex, length); + if (ExpressionUtility.IsLegalKeyword(str)) + { + // Test if follows property dereference operator. 
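+ // Because this check runs before the null/true/false/NaN/Infinity and function checks
+ // below, a keyword that follows "." is always treated as a property name; illustratively,
+ // the trailing "true" in "foo.true" becomes a PropertyName token rather than a Boolean.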
+ if (m_lastToken != null && m_lastToken.Kind == TokenKind.Dereference) + { + return CreateToken(TokenKind.PropertyName, str, startIndex); + } + + // Null + if (str.Equals(ExpressionConstants.Null, StringComparison.Ordinal)) + { + return CreateToken(TokenKind.Null, str, startIndex); + } + // Boolean + else if (str.Equals(ExpressionConstants.True, StringComparison.Ordinal)) + { + return CreateToken(TokenKind.Boolean, str, startIndex, true); + } + else if (str.Equals(ExpressionConstants.False, StringComparison.Ordinal)) + { + return CreateToken(TokenKind.Boolean, str, startIndex, false); + } + // NaN + else if (str.Equals(ExpressionConstants.NaN, StringComparison.Ordinal)) + { + return CreateToken(TokenKind.Number, str, startIndex, Double.NaN); + } + // Infinity + else if (str.Equals(ExpressionConstants.Infinity, StringComparison.Ordinal)) + { + return CreateToken(TokenKind.Number, str, startIndex, Double.PositiveInfinity); + } + + // Lookahead + var tempIndex = m_index; + while (tempIndex < m_expression.Length && Char.IsWhiteSpace(m_expression[tempIndex])) + { + tempIndex++; + } + + // Function + if (tempIndex < m_expression.Length && m_expression[tempIndex] == ExpressionConstants.StartGroup) // "(" + { + return CreateToken(TokenKind.Function, str, startIndex); + } + // Named-value + else + { + return CreateToken(TokenKind.NamedValue, str, startIndex); + } + } + else + { + // Invalid keyword + return CreateToken(TokenKind.Unexpected, str, startIndex); + } + } + + private Token ReadStringToken() + { + var startIndex = m_index; + var c = default(Char); + var closed = false; + var str = new StringBuilder(); + m_index++; // Skip the leading single-quote. + while (m_index < m_expression.Length) + { + c = m_expression[m_index++]; + if (c == '\'') + { + // End of string. + if (m_index >= m_expression.Length || m_expression[m_index] != '\'') + { + closed = true; + break; + } + + // Escaped single quote. 
+ m_index++; + } + + str.Append(c); + } + + var length = m_index - startIndex; + var rawValue = m_expression.Substring(startIndex, length); + if (closed) + { + return CreateToken(TokenKind.String, rawValue, startIndex, str.ToString()); + } + + return CreateToken(TokenKind.Unexpected, rawValue, startIndex); + } + + private Token ReadOperator() + { + var startIndex = m_index; + var raw = default(String); + m_index++; + + // Check for a two-character operator + if (m_index < m_expression.Length) + { + m_index++; + raw = m_expression.Substring(startIndex, 2); + switch (raw) + { + case ExpressionConstants.NotEqual: + case ExpressionConstants.GreaterThanOrEqual: + case ExpressionConstants.LessThanOrEqual: + case ExpressionConstants.Equal: + case ExpressionConstants.And: + case ExpressionConstants.Or: + return CreateToken(TokenKind.LogicalOperator, raw, startIndex); + } + + // Backup + m_index--; + } + + // Check for one-character operator + raw = m_expression.Substring(startIndex, 1); + switch (raw) + { + case ExpressionConstants.Not: + case ExpressionConstants.GreaterThan: + case ExpressionConstants.LessThan: + return CreateToken(TokenKind.LogicalOperator, raw, startIndex); + } + + // Unexpected + while (m_index < m_expression.Length && !TestTokenBoundary(m_expression[m_index])) + { + m_index++; + } + + var length = m_index - startIndex; + raw = m_expression.Substring(startIndex, length); + return CreateToken(TokenKind.Unexpected, raw, startIndex); + } + + private static Boolean TestTokenBoundary(Char c) + { + switch (c) + { + case ExpressionConstants.StartGroup: // "(" + case ExpressionConstants.StartIndex: // "[" + case ExpressionConstants.EndGroup: // ")" + case ExpressionConstants.EndIndex: // "]" + case ExpressionConstants.Separator: // "," + case ExpressionConstants.Dereference: // "." + case '!': // "!" 
and "!=" + case '>': // ">" and ">=" + case '<': // "<" and "<=" + case '=': // "==" + case '&': // "&&" + case '|': // "||" + return true; + default: + return char.IsWhiteSpace(c); + } + } + + private Token CreateToken( + TokenKind kind, + Char rawValue, + Int32 index, + Object parsedValue = null) + { + return CreateToken(kind, rawValue.ToString(), index, parsedValue); + } + + private Token CreateToken( + TokenKind kind, + String rawValue, + Int32 index, + Object parsedValue = null) + { + // Check whether the current token is legal based on the last token + var legal = false; + switch (kind) + { + case TokenKind.StartGroup: // "(" logical grouping + // Is first or follows "," or "(" or "[" or a logical operator + legal = CheckLastToken(null, TokenKind.Separator, TokenKind.StartGroup, TokenKind.StartParameters, TokenKind.StartIndex, TokenKind.LogicalOperator); + break; + case TokenKind.StartIndex: // "[" + // Follows ")", "]", "*", a property name, or a named-value + legal = CheckLastToken(TokenKind.EndGroup, TokenKind.EndParameters, TokenKind.EndIndex, TokenKind.Wildcard, TokenKind.PropertyName, TokenKind.NamedValue); + break; + case TokenKind.StartParameters: // "(" function call + // Follows a function + legal = CheckLastToken(TokenKind.Function); + break; + case TokenKind.EndGroup: // ")" logical grouping + // Follows ")", "]", "*", a literal, a property name, or a named-value + legal = CheckLastToken(TokenKind.EndGroup, TokenKind.EndParameters, TokenKind.EndIndex, TokenKind.Wildcard, TokenKind.Null, TokenKind.Boolean, TokenKind.Number, TokenKind.String, TokenKind.PropertyName, TokenKind.NamedValue); + break; + case TokenKind.EndIndex: // "]" + // Follows ")", "]", "*", a literal, a property name, or a named-value + legal = CheckLastToken(TokenKind.EndGroup, TokenKind.EndParameters, TokenKind.EndIndex, TokenKind.Wildcard, TokenKind.Null, TokenKind.Boolean, TokenKind.Number, TokenKind.String, TokenKind.PropertyName, TokenKind.NamedValue); + break; + case TokenKind.EndParameters: // ")" function call + // Follows "(" function call, ")", "]", "*", a literal, a property name, or a named-value + legal = CheckLastToken(TokenKind.StartParameters, TokenKind.EndGroup, TokenKind.EndParameters, TokenKind.EndIndex, TokenKind.Wildcard, TokenKind.Null, TokenKind.Boolean, TokenKind.Number, TokenKind.String, TokenKind.PropertyName, TokenKind.NamedValue); + break; + case TokenKind.Separator: // "," + // Follows ")", "]", "*", a literal, a property name, or a named-value + legal = CheckLastToken(TokenKind.EndGroup, TokenKind.EndParameters, TokenKind.EndIndex, TokenKind.Wildcard, TokenKind.Null, TokenKind.Boolean, TokenKind.Number, TokenKind.String, TokenKind.PropertyName, TokenKind.NamedValue); + break; + case TokenKind.Dereference: // "." + // Follows ")", "]", "*", a property name, or a named-value + legal = CheckLastToken(TokenKind.EndGroup, TokenKind.EndParameters, TokenKind.EndIndex, TokenKind.Wildcard, TokenKind.PropertyName, TokenKind.NamedValue); + break; + case TokenKind.Wildcard: // "*" + // Follows "[" or "." 
+ legal = CheckLastToken(TokenKind.StartIndex, TokenKind.Dereference); + break; + case TokenKind.LogicalOperator: // "!", "==", etc + switch (rawValue) + { + case ExpressionConstants.Not: + // Is first or follows "," or "(" or "[" or a logical operator + legal = CheckLastToken(null, TokenKind.Separator, TokenKind.StartGroup, TokenKind.StartParameters, TokenKind.StartIndex, TokenKind.LogicalOperator); + break; + default: + // Follows ")", "]", "*", a literal, a property name, or a named-value + legal = CheckLastToken(TokenKind.EndGroup, TokenKind.EndParameters, TokenKind.EndIndex, TokenKind.Wildcard, TokenKind.Null, TokenKind.Boolean, TokenKind.Number, TokenKind.String, TokenKind.PropertyName, TokenKind.NamedValue); + break; + } + break; + case TokenKind.Null: + case TokenKind.Boolean: + case TokenKind.Number: + case TokenKind.String: + // Is first or follows "," or "[" or "(" or a logical operator (e.g. "!" or "==" etc) + legal = CheckLastToken(null, TokenKind.Separator, TokenKind.StartIndex, TokenKind.StartGroup, TokenKind.StartParameters, TokenKind.LogicalOperator); + break; + case TokenKind.PropertyName: + // Follows "." + legal = CheckLastToken(TokenKind.Dereference); + break; + case TokenKind.Function: + // Is first or follows "," or "[" or "(" or a logical operator (e.g. "!" or "==" etc) + legal = CheckLastToken(null, TokenKind.Separator, TokenKind.StartIndex, TokenKind.StartGroup, TokenKind.StartParameters, TokenKind.LogicalOperator); + break; + case TokenKind.NamedValue: + // Is first or follows "," or "[" or "(" or a logical operator (e.g. "!" or "==" etc) + legal = CheckLastToken(null, TokenKind.Separator, TokenKind.StartIndex, TokenKind.StartGroup, TokenKind.StartParameters, TokenKind.LogicalOperator); + break; + } + + // Illegal + if (!legal) + { + return new Token(TokenKind.Unexpected, rawValue, index); + } + + // Legal so far + var token = new Token(kind, rawValue, index, parsedValue); + + switch (kind) + { + case TokenKind.StartGroup: // "(" logical grouping + case TokenKind.StartIndex: // "[" + case TokenKind.StartParameters: // "(" function call + // Track start token + m_unclosedTokens.Push(token); + break; + + case TokenKind.EndGroup: // ")" logical grouping + // Check inside logical grouping + if (m_unclosedTokens.FirstOrDefault()?.Kind != TokenKind.StartGroup) + { + return new Token(TokenKind.Unexpected, rawValue, index); + } + + // Pop start token + m_unclosedTokens.Pop(); + break; + + case TokenKind.EndIndex: // "]" + // Check inside indexer + if (m_unclosedTokens.FirstOrDefault()?.Kind != TokenKind.StartIndex) + { + return new Token(TokenKind.Unexpected, rawValue, index); + } + + // Pop start token + m_unclosedTokens.Pop(); + break; + + case TokenKind.EndParameters: // ")" function call + // Check inside function call + if (m_unclosedTokens.FirstOrDefault()?.Kind != TokenKind.StartParameters) + { + return new Token(TokenKind.Unexpected, rawValue, index); + } + + // Pop start token + m_unclosedTokens.Pop(); + break; + + case TokenKind.Separator: // "," + // Check inside function call + if (m_unclosedTokens.FirstOrDefault()?.Kind != TokenKind.StartParameters) + { + return new Token(TokenKind.Unexpected, rawValue, index); + } + break; + } + + return token; + } + + /// + /// Checks whether the last token kind is in the array of allowed kinds. 
+ /// + private Boolean CheckLastToken(params TokenKind?[] allowed) + { + var lastKind = m_lastToken?.Kind; + foreach (var kind in allowed) + { + if (kind == lastKind) + { + return true; + } + } + + return false; + } + + private readonly String m_expression; // Raw expression string + private readonly Stack m_unclosedTokens = new Stack(); // Unclosed start tokens + private Int32 m_index; // Index of raw expression string + private Token m_lastToken; + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Tokens/Token.cs b/src/Sdk/DTExpressions2/Expressions2/Tokens/Token.cs new file mode 100644 index 00000000000..3f5903b4062 --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Tokens/Token.cs @@ -0,0 +1,209 @@ +using System; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.DistributedTask.Expressions2.Sdk.Operators; + +namespace GitHub.DistributedTask.Expressions2.Tokens +{ + internal sealed class Token + { + public Token( + TokenKind kind, + String rawValue, + Int32 index, + Object parsedValue = null) + { + Kind = kind; + RawValue = rawValue; + Index = index; + ParsedValue = parsedValue; + } + + public TokenKind Kind { get; } + + public String RawValue { get; } + + public Int32 Index { get; } + + public Object ParsedValue { get; } + + public Associativity Associativity + { + get + { + switch (Kind) + { + case TokenKind.StartGroup: + return Associativity.None; + case TokenKind.LogicalOperator: + switch (RawValue) + { + case ExpressionConstants.Not: // "!" + return Associativity.RightToLeft; + } + break; + } + + return IsOperator ? Associativity.LeftToRight : Associativity.None; + } + } + + public Boolean IsOperator + { + get + { + switch (Kind) + { + case TokenKind.StartGroup: // "(" logical grouping + case TokenKind.StartIndex: // "[" + case TokenKind.StartParameters: // "(" function call + case TokenKind.EndGroup: // ")" logical grouping + case TokenKind.EndIndex: // "]" + case TokenKind.EndParameters: // ")" function call + case TokenKind.Separator: // "," + case TokenKind.Dereference: // "." + case TokenKind.LogicalOperator: // "!", "==", etc + return true; + default: + return false; + } + } + } + + /// + /// Operator precedence. The value is only meaningful for operator tokens. + /// + public Int32 Precedence + { + get + { + switch (Kind) + { + case TokenKind.StartGroup: // "(" logical grouping + return 20; + case TokenKind.StartIndex: // "[" + case TokenKind.StartParameters: // "(" function call + case TokenKind.Dereference: // "." + return 19; + case TokenKind.LogicalOperator: + switch (RawValue) + { + case ExpressionConstants.Not: // "!" + return 16; + case ExpressionConstants.GreaterThan: // ">" + case ExpressionConstants.GreaterThanOrEqual:// ">=" + case ExpressionConstants.LessThan: // "<" + case ExpressionConstants.LessThanOrEqual: // "<=" + return 11; + case ExpressionConstants.Equal: // "==" + case ExpressionConstants.NotEqual: // "!=" + return 10; + case ExpressionConstants.And: // "&&" + return 6; + case ExpressionConstants.Or: // "||" + return 5; + } + break; + case TokenKind.EndGroup: // ")" logical grouping + case TokenKind.EndIndex: // "]" + case TokenKind.EndParameters: // ")" function call + case TokenKind.Separator: // "," + return 1; + } + + return 0; + } + } + + /// + /// Expected number of operands. The value is only meaningful for standalone unary operators and binary operators. + /// + public Int32 OperandCount + { + get + { + switch (Kind) + { + case TokenKind.StartIndex: // "[" + case TokenKind.Dereference: // "." 
+ return 2; + case TokenKind.LogicalOperator: + switch (RawValue) + { + case ExpressionConstants.Not: // "!" + return 1; + case ExpressionConstants.GreaterThan: // ">" + case ExpressionConstants.GreaterThanOrEqual:// ">=" + case ExpressionConstants.LessThan: // "<" + case ExpressionConstants.LessThanOrEqual: // "<=" + case ExpressionConstants.Equal: // "==" + case ExpressionConstants.NotEqual: // "!=" + case ExpressionConstants.And: // "&&" + case ExpressionConstants.Or: // "|" + return 2; + } + break; + } + + return 0; + } + } + + public ExpressionNode ToNode() + { + switch (Kind) + { + case TokenKind.StartIndex: // "[" + case TokenKind.Dereference: // "." + return new Index(); + + case TokenKind.LogicalOperator: + switch (RawValue) + { + case ExpressionConstants.Not: // "!" + return new Not(); + + case ExpressionConstants.NotEqual: // "!=" + return new NotEqual(); + + case ExpressionConstants.GreaterThan: // ">" + return new GreaterThan(); + + case ExpressionConstants.GreaterThanOrEqual:// ">=" + return new GreaterThanOrEqual(); + + case ExpressionConstants.LessThan: // "<" + return new LessThan(); + + case ExpressionConstants.LessThanOrEqual: // "<=" + return new LessThanOrEqual(); + + case ExpressionConstants.Equal: // "==" + return new Equal(); + + case ExpressionConstants.And: // "&&" + return new And(); + + case ExpressionConstants.Or: // "||" + return new Or(); + + default: + throw new NotSupportedException($"Unexpected logical operator '{RawValue}' when creating node"); + } + + case TokenKind.Null: + case TokenKind.Boolean: + case TokenKind.Number: + case TokenKind.String: + return new Literal(ParsedValue); + + case TokenKind.PropertyName: + return new Literal(RawValue); + + case TokenKind.Wildcard: // "*" + return new Wildcard(); + } + + throw new NotSupportedException($"Unexpected kind '{Kind}' when creating node"); + } + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/Tokens/TokenKind.cs b/src/Sdk/DTExpressions2/Expressions2/Tokens/TokenKind.cs new file mode 100644 index 00000000000..dd7d81aed2d --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/Tokens/TokenKind.cs @@ -0,0 +1,28 @@ +namespace GitHub.DistributedTask.Expressions2.Tokens +{ + internal enum TokenKind + { + // Punctuation + StartGroup, // "(" logical grouping + StartIndex, // "[" + StartParameters, // "(" function call + EndGroup, // ")" logical grouping + EndIndex, // "]" + EndParameters, // ")" function call + Separator, // "," + Dereference, // "." + Wildcard, // "*" + LogicalOperator, // "!", "==", etc + + // Values + Null, + Boolean, + Number, + String, + PropertyName, + Function, + NamedValue, + + Unexpected, + } +} diff --git a/src/Sdk/DTExpressions2/Expressions2/ValueKind.cs b/src/Sdk/DTExpressions2/Expressions2/ValueKind.cs new file mode 100644 index 00000000000..ae9e4d6295d --- /dev/null +++ b/src/Sdk/DTExpressions2/Expressions2/ValueKind.cs @@ -0,0 +1,15 @@ +using System.ComponentModel; + +namespace GitHub.DistributedTask.Expressions2 +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public enum ValueKind + { + Array, + Boolean, + Null, + Number, + Object, + String, + } +} diff --git a/src/Sdk/DTGenerated/Generated/TaskAgentHttpClientBase.cs b/src/Sdk/DTGenerated/Generated/TaskAgentHttpClientBase.cs new file mode 100644 index 00000000000..408b407a3ff --- /dev/null +++ b/src/Sdk/DTGenerated/Generated/TaskAgentHttpClientBase.cs @@ -0,0 +1,8786 @@ +/* + * --------------------------------------------------------- + * Copyright(C) Microsoft Corporation. All rights reserved. 
+ * --------------------------------------------------------- + * + * --------------------------------------------------------- + * Generated file, DO NOT EDIT + * --------------------------------------------------------- + * + * See following wiki page for instructions on how to regenerate: + * https://aka.ms/azure-devops-client-generation + * + * Configuration file: + * distributedtask\client\webapi\clientgeneratorconfigs\genclient.json + */ + +using System; +using System.Collections.Generic; +using System.Collections.Specialized; +using System.ComponentModel; +using System.Globalization; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Net.Http.Formatting; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + [ResourceArea(TaskResourceIds.AreaId)] + public abstract class TaskAgentHttpClientBase : TaskAgentHttpClientCompatBase + { + public TaskAgentHttpClientBase(Uri baseUrl, VssCredentials credentials) + : base(baseUrl, credentials) + { + } + + public TaskAgentHttpClientBase(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings) + : base(baseUrl, credentials, settings) + { + } + + public TaskAgentHttpClientBase(Uri baseUrl, VssCredentials credentials, params DelegatingHandler[] handlers) + : base(baseUrl, credentials, handlers) + { + } + + public TaskAgentHttpClientBase(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings, params DelegatingHandler[] handlers) + : base(baseUrl, credentials, settings, handlers) + { + } + + public TaskAgentHttpClientBase(Uri baseUrl, HttpMessageHandler pipeline, bool disposeHandler) + : base(baseUrl, pipeline, disposeHandler) + { + } + + /// + /// [Preview API] + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task AddAgentCloudAsync( + TaskAgentCloud agentCloud, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("bfa72b3d-0fc6-43fb-932b-a7f6559f93b9"); + HttpContent content = new ObjectContent(agentCloud, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task DeleteAgentCloudAsync( + int agentCloudId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("bfa72b3d-0fc6-43fb-932b-a7f6559f93b9"); + object routeValues = new { agentCloudId = agentCloudId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// The cancellation token to cancel operation. 
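// A hedged usage sketch for the agent-cloud CRUD methods in this client. The concrete
// TaskAgentHttpClient subclass, the organization URL, the credentials variable
// (a VssCredentials instance), and the property names on TaskAgentCloud are
// assumptions, not defined in this file.
//
//   var client = new TaskAgentHttpClient(new Uri("https://dev.azure.com/myorg"), credentials);
//   TaskAgentCloud created = await client.AddAgentCloudAsync(new TaskAgentCloud { Name = "my-cloud" });
//   List<TaskAgentCloud> clouds = await client.GetAgentCloudsAsync();
//   await client.DeleteAgentCloudAsync(created.AgentCloudId);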
+ public virtual Task GetAgentCloudAsync( + int agentCloudId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("bfa72b3d-0fc6-43fb-932b-a7f6559f93b9"); + object routeValues = new { agentCloudId = agentCloudId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentCloudsAsync( + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("bfa72b3d-0fc6-43fb-932b-a7f6559f93b9"); + + return SendAsync>( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get agent cloud types. + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentCloudTypesAsync( + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("5932e193-f376-469d-9c3e-e5588ce12cb5"); + + return SendAsync>( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsForQueueAsync( + int queueId, + int top, + string continuationToken = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("f5f81ffb-f396-498d-85b1-5ada145e648a"); + object routeValues = new { queueId = queueId }; + + List> queryParams = new List>(); + queryParams.Add("$top", top.ToString(CultureInfo.InvariantCulture)); + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task QueueAgentRequestAsync( + int queueId, + TaskAgentJobRequest request, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("f5f81ffb-f396-498d-85b1-5ada145e648a"); + object routeValues = new { queueId = queueId }; + HttpContent content = new ObjectContent(request, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Adds an agent to a pool. You probably don't want to call this endpoint directly. Instead, [configure an agent](https://docs.microsoft.com/azure/devops/pipelines/agents/agents) using the agent download package. 
+ /// + /// The agent pool in which to add the agent + /// Details about the agent being added + /// + /// The cancellation token to cancel operation. + public virtual Task AddAgentAsync( + int poolId, + TaskAgent agent, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("e298ef32-5878-4cab-993c-043836571f42"); + object routeValues = new { poolId = poolId }; + HttpContent content = new ObjectContent(agent, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Delete an agent. You probably don't want to call this endpoint directly. Instead, [use the agent configuration script](https://docs.microsoft.com/azure/devops/pipelines/agents/agents) to remove an agent from your organization. + /// + /// The pool ID to remove the agent from + /// The agent ID to remove + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteAgentAsync( + int poolId, + int agentId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("e298ef32-5878-4cab-993c-043836571f42"); + object routeValues = new { poolId = poolId, agentId = agentId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Get information about an agent. + /// + /// The agent pool containing the agent + /// The agent ID to get information about + /// Whether to include the agent's capabilities in the response + /// Whether to include details about the agent's current work + /// Whether to include details about the agents' most recent completed work + /// Filter which custom properties will be returned + /// + /// The cancellation token to cancel operation. + public virtual Task GetAgentAsync( + int poolId, + int agentId, + bool? includeCapabilities = null, + bool? includeAssignedRequest = null, + bool? 
includeLastCompletedRequest = null, + IEnumerable propertyFilters = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("e298ef32-5878-4cab-993c-043836571f42"); + object routeValues = new { poolId = poolId, agentId = agentId }; + + List> queryParams = new List>(); + if (includeCapabilities != null) + { + queryParams.Add("includeCapabilities", includeCapabilities.Value.ToString()); + } + if (includeAssignedRequest != null) + { + queryParams.Add("includeAssignedRequest", includeAssignedRequest.Value.ToString()); + } + if (includeLastCompletedRequest != null) + { + queryParams.Add("includeLastCompletedRequest", includeLastCompletedRequest.Value.ToString()); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agents. + /// + /// The agent pool containing the agents + /// Filter on agent name + /// Whether to include the agents' capabilities in the response + /// Whether to include details about the agents' current work + /// Whether to include details about the agents' most recent completed work + /// Filter which custom properties will be returned + /// Filter by demands the agents can satisfy + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentsAsync( + int poolId, + string agentName = null, + bool? includeCapabilities = null, + bool? includeAssignedRequest = null, + bool? includeLastCompletedRequest = null, + IEnumerable propertyFilters = null, + IEnumerable demands = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("e298ef32-5878-4cab-993c-043836571f42"); + object routeValues = new { poolId = poolId }; + + List> queryParams = new List>(); + if (agentName != null) + { + queryParams.Add("agentName", agentName); + } + if (includeCapabilities != null) + { + queryParams.Add("includeCapabilities", includeCapabilities.Value.ToString()); + } + if (includeAssignedRequest != null) + { + queryParams.Add("includeAssignedRequest", includeAssignedRequest.Value.ToString()); + } + if (includeLastCompletedRequest != null) + { + queryParams.Add("includeLastCompletedRequest", includeLastCompletedRequest.Value.ToString()); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + if (demands != null && demands.Any()) + { + queryParams.Add("demands", string.Join(",", demands)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Replace an agent. You probably don't want to call this endpoint directly. Instead, [use the agent configuration script](https://docs.microsoft.com/azure/devops/pipelines/agents/agents) to remove and reconfigure an agent from your organization. 
+ /// + /// The agent pool to use + /// The agent to replace + /// Updated details about the replacing agent + /// + /// The cancellation token to cancel operation. + public virtual Task ReplaceAgentAsync( + int poolId, + int agentId, + TaskAgent agent, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("e298ef32-5878-4cab-993c-043836571f42"); + object routeValues = new { poolId = poolId, agentId = agentId }; + HttpContent content = new ObjectContent(agent, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update agent details. + /// + /// The agent pool to use + /// The agent to update + /// Updated details about the agent + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateAgentAsync( + int poolId, + int agentId, + TaskAgent agent, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("e298ef32-5878-4cab-993c-043836571f42"); + object routeValues = new { poolId = poolId, agentId = agentId }; + HttpContent content = new ObjectContent(agent, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Returns list of azure subscriptions + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetAzureManagementGroupsAsync( + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("39fe3bf2-7ee0-4198-a469-4a29929afa9c"); + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Returns list of azure subscriptions + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetAzureSubscriptionsAsync( + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("bcd6189c-0303-471f-a8e1-acb22b74d700"); + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] GET a PAT token for managing (configuring, removing, tagging) deployment targets in a deployment group. + /// + /// Project ID or project name + /// ID of the deployment group in which deployment targets are managed. + /// + /// The cancellation token to cancel operation. 
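// A hedged sketch of querying and updating agents with the pool/agent methods above;
// the pool id, agent name, and client variable are placeholders. The optional bool?
// arguments are only added to the query string ("includeCapabilities",
// "includeAssignedRequest", ...) when they are non-null, so omitting them keeps the
// request minimal.
//
//   List<TaskAgent> agents = await client.GetAgentsAsync(
//       poolId: 1,
//       includeCapabilities: true,
//       includeAssignedRequest: true);
//   TaskAgent runner = agents.First(a => a.Name == "my-runner");
//   await client.UpdateAgentAsync(poolId: 1, agentId: runner.Id, agent: runner);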
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GenerateDeploymentGroupAccessTokenAsync( + string project, + int deploymentGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("3d197ba2-c3e9-4253-882f-0ee2440f8174"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] GET a PAT token for managing (configuring, removing, tagging) deployment targets in a deployment group. + /// + /// Project ID + /// ID of the deployment group in which deployment targets are managed. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GenerateDeploymentGroupAccessTokenAsync( + Guid project, + int deploymentGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("3d197ba2-c3e9-4253-882f-0ee2440f8174"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Create a deployment group. + /// + /// Project ID or project name + /// Deployment group to create. + /// + /// The cancellation token to cancel operation. + public virtual Task AddDeploymentGroupAsync( + string project, + DeploymentGroupCreateParameter deploymentGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("083c4d89-ab35-45af-aa11-7cf66895c53e"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(deploymentGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Create a deployment group. + /// + /// Project ID + /// Deployment group to create. + /// + /// The cancellation token to cancel operation. + public virtual Task AddDeploymentGroupAsync( + Guid project, + DeploymentGroupCreateParameter deploymentGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("083c4d89-ab35-45af-aa11-7cf66895c53e"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(deploymentGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Delete a deployment group. + /// + /// Project ID or project name + /// ID of the deployment group to be deleted. + /// + /// The cancellation token to cancel operation. 
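// A hedged sketch of the deployment-group lifecycle, using AddDeploymentGroupAsync
// above and DeleteDeploymentGroupAsync just below. Each operation is generated twice,
// accepting the project either as a name (string) or as an id (Guid); both variants
// build the same route values against the same location id. "MyProject", the group
// name, and the Id property access are illustrative assumptions.
//
//   DeploymentGroup created = await client.AddDeploymentGroupAsync(
//       "MyProject",
//       new DeploymentGroupCreateParameter { Name = "web-servers" });
//   await client.DeleteDeploymentGroupAsync("MyProject", created.Id);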
+ public virtual async Task DeleteDeploymentGroupAsync( + string project, + int deploymentGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("083c4d89-ab35-45af-aa11-7cf66895c53e"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Delete a deployment group. + /// + /// Project ID + /// ID of the deployment group to be deleted. + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteDeploymentGroupAsync( + Guid project, + int deploymentGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("083c4d89-ab35-45af-aa11-7cf66895c53e"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Get a deployment group by its ID. + /// + /// Project ID or project name + /// ID of the deployment group. + /// Get the deployment group only if this action can be performed on it. + /// Include these additional details in the returned object. + /// + /// The cancellation token to cancel operation. + public virtual Task GetDeploymentGroupAsync( + string project, + int deploymentGroupId, + DeploymentGroupActionFilter? actionFilter = null, + DeploymentGroupExpands? expand = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("083c4d89-ab35-45af-aa11-7cf66895c53e"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a deployment group by its ID. + /// + /// Project ID + /// ID of the deployment group. + /// Get the deployment group only if this action can be performed on it. + /// Include these additional details in the returned object. + /// + /// The cancellation token to cancel operation. + public virtual Task GetDeploymentGroupAsync( + Guid project, + int deploymentGroupId, + DeploymentGroupActionFilter? actionFilter = null, + DeploymentGroupExpands? 
expand = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("083c4d89-ab35-45af-aa11-7cf66895c53e"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of deployment groups by name or IDs. + /// + /// Project ID or project name + /// Name of the deployment group. + /// Get only deployment groups on which this action can be performed. + /// Include these additional details in the returned objects. + /// Get deployment groups with names greater than this continuationToken lexicographically. + /// Maximum number of deployment groups to return. Default is **1000**. + /// Comma separated list of IDs of the deployment groups. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDeploymentGroupsAsync( + string project, + string name = null, + DeploymentGroupActionFilter? actionFilter = null, + DeploymentGroupExpands? expand = null, + string continuationToken = null, + int? top = null, + IEnumerable ids = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("083c4d89-ab35-45af-aa11-7cf66895c53e"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (name != null) + { + queryParams.Add("name", name); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (ids != null && ids.Any()) + { + queryParams.Add("ids", string.Join(",", ids)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of deployment groups by name or IDs. + /// + /// Project ID + /// Name of the deployment group. + /// Get only deployment groups on which this action can be performed. + /// Include these additional details in the returned objects. + /// Get deployment groups with names greater than this continuationToken lexicographically. + /// Maximum number of deployment groups to return. Default is **1000**. + /// Comma separated list of IDs of the deployment groups. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDeploymentGroupsAsync( + Guid project, + string name = null, + DeploymentGroupActionFilter? actionFilter = null, + DeploymentGroupExpands? expand = null, + string continuationToken = null, + int? 
top = null, + IEnumerable ids = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("083c4d89-ab35-45af-aa11-7cf66895c53e"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (name != null) + { + queryParams.Add("name", name); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (ids != null && ids.Any()) + { + queryParams.Add("ids", string.Join(",", ids)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Update a deployment group. + /// + /// Project ID or project name + /// ID of the deployment group. + /// Deployment group to update. + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateDeploymentGroupAsync( + string project, + int deploymentGroupId, + DeploymentGroupUpdateParameter deploymentGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("083c4d89-ab35-45af-aa11-7cf66895c53e"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + HttpContent content = new ObjectContent(deploymentGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update a deployment group. + /// + /// Project ID + /// ID of the deployment group. + /// Deployment group to update. + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateDeploymentGroupAsync( + Guid project, + int deploymentGroupId, + DeploymentGroupUpdateParameter deploymentGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("083c4d89-ab35-45af-aa11-7cf66895c53e"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + HttpContent content = new ObjectContent(deploymentGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Get a list of deployment group metrics. + /// + /// Project ID or project name + /// Name of the deployment group. + /// Get metrics for deployment groups with names greater than this continuationToken lexicographically. + /// Maximum number of deployment group metrics to return. Default is **50**. + /// + /// The cancellation token to cancel operation. 
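// A hedged sketch of page-wise listing with the continuation-token pattern used by
// GetDeploymentGroupsAsync above. Only the parameters visible in the signature are
// used; how the next continuation token is surfaced by the concrete client is not
// shown in this file, so the values below are placeholders.
//
//   List<DeploymentGroup> page = await client.GetDeploymentGroupsAsync(
//       "MyProject",
//       name: "web",
//       top: 100,
//       continuationToken: null);
//   // "name", "$top", and "continuationToken" are sent as query parameters; groups with
//   // names lexicographically greater than the token are returned, per the docs above.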
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetDeploymentGroupsMetricsAsync( + string project, + string deploymentGroupName = null, + string continuationToken = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("281c6308-427a-49e1-b83a-dac0f4862189"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (deploymentGroupName != null) + { + queryParams.Add("deploymentGroupName", deploymentGroupName); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of deployment group metrics. + /// + /// Project ID + /// Name of the deployment group. + /// Get metrics for deployment groups with names greater than this continuationToken lexicographically. + /// Maximum number of deployment group metrics to return. Default is **50**. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetDeploymentGroupsMetricsAsync( + Guid project, + string deploymentGroupName = null, + string continuationToken = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("281c6308-427a-49e1-b83a-dac0f4862189"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (deploymentGroupName != null) + { + queryParams.Add("deploymentGroupName", deploymentGroupName); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsForDeploymentMachineAsync( + string project, + int deploymentGroupId, + int machineId, + int? 
completedRequestCount = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("a3540e5b-f0dc-4668-963b-b752459be545"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + queryParams.Add("machineId", machineId.ToString(CultureInfo.InvariantCulture)); + if (completedRequestCount != null) + { + queryParams.Add("completedRequestCount", completedRequestCount.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsForDeploymentMachineAsync( + Guid project, + int deploymentGroupId, + int machineId, + int? completedRequestCount = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("a3540e5b-f0dc-4668-963b-b752459be545"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + queryParams.Add("machineId", machineId.ToString(CultureInfo.InvariantCulture)); + if (completedRequestCount != null) + { + queryParams.Add("completedRequestCount", completedRequestCount.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsForDeploymentMachinesAsync( + string project, + int deploymentGroupId, + IEnumerable machineIds = null, + int? completedRequestCount = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("a3540e5b-f0dc-4668-963b-b752459be545"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + if (machineIds != null && machineIds.Any()) + { + queryParams.Add("machineIds", string.Join(",", machineIds)); + } + if (completedRequestCount != null) + { + queryParams.Add("completedRequestCount", completedRequestCount.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsForDeploymentMachinesAsync( + Guid project, + int deploymentGroupId, + IEnumerable machineIds = null, + int? 
completedRequestCount = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("a3540e5b-f0dc-4668-963b-b752459be545"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + if (machineIds != null && machineIds.Any()) + { + queryParams.Add("machineIds", string.Join(",", machineIds)); + } + if (completedRequestCount != null) + { + queryParams.Add("completedRequestCount", completedRequestCount.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task RefreshDeploymentMachinesAsync( + string project, + int deploymentGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("91006ac4-0f68-4d82-a2bc-540676bd73ce"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task RefreshDeploymentMachinesAsync( + Guid project, + int deploymentGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("91006ac4-0f68-4d82-a2bc-540676bd73ce"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] GET a PAT token for managing (configuring, removing, tagging) deployment agents in a deployment pool. + /// + /// ID of the deployment pool in which deployment agents are managed. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GenerateDeploymentPoolAccessTokenAsync( + int poolId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("e077ee4a-399b-420b-841f-c43fbc058e0b"); + object routeValues = new { poolId = poolId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of deployment pool summaries. + /// + /// Name of the deployment pool. + /// Include these additional details in the returned objects. + /// List of deployment pool ids. 
+ /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetDeploymentPoolsSummaryAsync( + string poolName = null, + DeploymentPoolSummaryExpands? expands = null, + IEnumerable poolIds = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6525d6c6-258f-40e0-a1a9-8a24a3957625"); + + List> queryParams = new List>(); + if (poolName != null) + { + queryParams.Add("poolName", poolName); + } + if (expands != null) + { + queryParams.Add("expands", expands.Value.ToString()); + } + if (poolIds != null && poolIds.Any()) + { + queryParams.Add("poolIds", string.Join(",", poolIds)); + } + + return SendAsync>( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get agent requests for a deployment target. + /// + /// Project ID or project name + /// ID of the deployment group to which the target belongs. + /// ID of the deployment target. + /// Maximum number of completed requests to return. Default is **50** + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsForDeploymentTargetAsync( + string project, + int deploymentGroupId, + int targetId, + int? completedRequestCount = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("2fac0be3-8c8f-4473-ab93-c1389b08a2c9"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + queryParams.Add("targetId", targetId.ToString(CultureInfo.InvariantCulture)); + if (completedRequestCount != null) + { + queryParams.Add("completedRequestCount", completedRequestCount.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get agent requests for a deployment target. + /// + /// Project ID + /// ID of the deployment group to which the target belongs. + /// ID of the deployment target. + /// Maximum number of completed requests to return. Default is **50** + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsForDeploymentTargetAsync( + Guid project, + int deploymentGroupId, + int targetId, + int? 
completedRequestCount = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("2fac0be3-8c8f-4473-ab93-c1389b08a2c9"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + queryParams.Add("targetId", targetId.ToString(CultureInfo.InvariantCulture)); + if (completedRequestCount != null) + { + queryParams.Add("completedRequestCount", completedRequestCount.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get agent requests for a list deployment targets. + /// + /// Project ID or project name + /// ID of the deployment group to which the targets belong. + /// Comma separated list of IDs of the deployment targets. + /// Id of owner of agent job request. + /// Datetime to return request after this time. + /// Maximum number of completed requests to return for each target. Default is **50** + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsForDeploymentTargetsAsync( + string project, + int deploymentGroupId, + IEnumerable targetIds = null, + int? ownerId = null, + DateTime? completedOn = null, + int? completedRequestCount = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("2fac0be3-8c8f-4473-ab93-c1389b08a2c9"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + if (targetIds != null && targetIds.Any()) + { + queryParams.Add("targetIds", string.Join(",", targetIds)); + } + if (ownerId != null) + { + queryParams.Add("ownerId", ownerId.Value.ToString(CultureInfo.InvariantCulture)); + } + if (completedOn != null) + { + AddDateTimeToQueryParams(queryParams, "completedOn", completedOn.Value); + } + if (completedRequestCount != null) + { + queryParams.Add("completedRequestCount", completedRequestCount.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get agent requests for a list deployment targets. + /// + /// Project ID + /// ID of the deployment group to which the targets belong. + /// Comma separated list of IDs of the deployment targets. + /// Id of owner of agent job request. + /// Datetime to return request after this time. + /// Maximum number of completed requests to return for each target. Default is **50** + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsForDeploymentTargetsAsync( + Guid project, + int deploymentGroupId, + IEnumerable targetIds = null, + int? ownerId = null, + DateTime? completedOn = null, + int? 
completedRequestCount = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("2fac0be3-8c8f-4473-ab93-c1389b08a2c9"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + if (targetIds != null && targetIds.Any()) + { + queryParams.Add("targetIds", string.Join(",", targetIds)); + } + if (ownerId != null) + { + queryParams.Add("ownerId", ownerId.Value.ToString(CultureInfo.InvariantCulture)); + } + if (completedOn != null) + { + AddDateTimeToQueryParams(queryParams, "completedOn", completedOn.Value); + } + if (completedRequestCount != null) + { + queryParams.Add("completedRequestCount", completedRequestCount.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Upgrade the deployment targets in a deployment group. + /// + /// Project ID or project name + /// ID of the deployment group. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task RefreshDeploymentTargetsAsync( + string project, + int deploymentGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("1c1a817f-f23d-41c6-bf8d-14b638f64152"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Upgrade the deployment targets in a deployment group. + /// + /// Project ID + /// ID of the deployment group. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task RefreshDeploymentTargetsAsync( + Guid project, + int deploymentGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("1c1a817f-f23d-41c6-bf8d-14b638f64152"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Proxy for a GET request defined by an 'endpoint'. The request is authorized using a service connection. The response is filtered using an XPath/Json based selector. + /// + /// Describes the URL to fetch. + /// + /// The cancellation token to cancel operation. 
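// Illustrative caller-side sketch, not part of the generated client: assuming this class is
// consumed through a connection object that exposes GetClient<T>() (the names "connection",
// "client" and "TaskAgentHttpClient" below are assumptions, not confirmed by this excerpt),
// the two method families above can be combined to inspect recent requests for specific
// deployment targets and then ask the deployment group to upgrade its targets:
//
//     var client = connection.GetClient<TaskAgentHttpClient>();
//     var recent = await client.GetAgentRequestsForDeploymentTargetsAsync(
//         project: "MyProject",
//         deploymentGroupId: 12,
//         targetIds: new[] { 3, 4 },
//         completedRequestCount: 10);
//     await client.RefreshDeploymentTargetsAsync("MyProject", deploymentGroupId: 12);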
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> QueryEndpointAsync( + TaskDefinitionEndpoint endpoint, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("f223b809-8c33-4b7d-b53f-07232569b5d6"); + HttpContent content = new ObjectContent(endpoint, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Get environment deployment execution history + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetEnvironmentDeploymentExecutionRecordsAsync( + string project, + int environmentId, + string continuationToken = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("51bb5d21-4305-4ea6-9dbb-b7488af73334"); + object routeValues = new { project = project, environmentId = environmentId }; + + List> queryParams = new List>(); + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get environment deployment execution history + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetEnvironmentDeploymentExecutionRecordsAsync( + Guid project, + int environmentId, + string continuationToken = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("51bb5d21-4305-4ea6-9dbb-b7488af73334"); + object routeValues = new { project = project, environmentId = environmentId }; + + List> queryParams = new List>(); + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Create an environment. + /// + /// Project ID or project name + /// Environment to create. + /// + /// The cancellation token to cancel operation. 
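// Illustrative sketch, not part of the generated client: the execution-record query above takes
// a continuationToken and a top count, so deployment history is fetched in pages. How the next
// token is surfaced to the caller is not visible in this excerpt, so that part is only indicated
// in a comment; "client" is the same assumed client instance used in the sketch above:
//
//     var firstPage = await client.GetEnvironmentDeploymentExecutionRecordsAsync(
//         "MyProject", environmentId: 7, top: 100);
//     // feed the continuation token surfaced by the previous response (mechanism not shown in
//     // this excerpt) into the next call to continue where the first page stopped:
//     var nextPage = await client.GetEnvironmentDeploymentExecutionRecordsAsync(
//         "MyProject", environmentId: 7, continuationToken: "<token from previous page>", top: 100);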
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task AddEnvironmentAsync( + string project, + EnvironmentCreateParameter environmentCreateParameter, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("8572b1fc-2482-47fa-8f74-7e3ed53ee54b"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(environmentCreateParameter, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Create an environment. + /// + /// Project ID + /// Environment to create. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task AddEnvironmentAsync( + Guid project, + EnvironmentCreateParameter environmentCreateParameter, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("8572b1fc-2482-47fa-8f74-7e3ed53ee54b"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(environmentCreateParameter, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Delete the specified environment. + /// + /// Project ID or project name + /// ID of the environment. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteEnvironmentAsync( + string project, + int environmentId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("8572b1fc-2482-47fa-8f74-7e3ed53ee54b"); + object routeValues = new { project = project, environmentId = environmentId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Delete the specified environment. + /// + /// Project ID + /// ID of the environment. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteEnvironmentAsync( + Guid project, + int environmentId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("8572b1fc-2482-47fa-8f74-7e3ed53ee54b"); + object routeValues = new { project = project, environmentId = environmentId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Get an environment by its ID. + /// + /// Project ID or project name + /// ID of the environment. + /// Include these additional details in the returned objects. 
+ /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetEnvironmentByIdAsync( + string project, + int environmentId, + EnvironmentExpands? expands = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("8572b1fc-2482-47fa-8f74-7e3ed53ee54b"); + object routeValues = new { project = project, environmentId = environmentId }; + + List> queryParams = new List>(); + if (expands != null) + { + queryParams.Add("expands", expands.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get an environment by its ID. + /// + /// Project ID + /// ID of the environment. + /// Include these additional details in the returned objects. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetEnvironmentByIdAsync( + Guid project, + int environmentId, + EnvironmentExpands? expands = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("8572b1fc-2482-47fa-8f74-7e3ed53ee54b"); + object routeValues = new { project = project, environmentId = environmentId }; + + List> queryParams = new List>(); + if (expands != null) + { + queryParams.Add("expands", expands.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get all environments. + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetEnvironmentsAsync( + string project, + string name = null, + string continuationToken = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("8572b1fc-2482-47fa-8f74-7e3ed53ee54b"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (name != null) + { + queryParams.Add("name", name); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get all environments. + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetEnvironmentsAsync( + Guid project, + string name = null, + string continuationToken = null, + int? 
top = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("8572b1fc-2482-47fa-8f74-7e3ed53ee54b"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (name != null) + { + queryParams.Add("name", name); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Update the specified environment. + /// + /// Project ID or project name + /// ID of the environment. + /// Environment data to update. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateEnvironmentAsync( + string project, + int environmentId, + EnvironmentUpdateParameter environmentUpdateParameter, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("8572b1fc-2482-47fa-8f74-7e3ed53ee54b"); + object routeValues = new { project = project, environmentId = environmentId }; + HttpContent content = new ObjectContent(environmentUpdateParameter, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update the specified environment. + /// + /// Project ID + /// ID of the environment. + /// Environment data to update. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateEnvironmentAsync( + Guid project, + int environmentId, + EnvironmentUpdateParameter environmentUpdateParameter, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("8572b1fc-2482-47fa-8f74-7e3ed53ee54b"); + object routeValues = new { project = project, environmentId = environmentId }; + HttpContent content = new ObjectContent(environmentUpdateParameter, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetTaskHubLicenseDetailsAsync( + string hubName, + bool? includeEnterpriseUsersCount = null, + bool? 
includeHostedAgentMinutesCount = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("f9f0f436-b8a1-4475-9041-1ccdbf8f0128"); + object routeValues = new { hubName = hubName }; + + List> queryParams = new List>(); + if (includeEnterpriseUsersCount != null) + { + queryParams.Add("includeEnterpriseUsersCount", includeEnterpriseUsersCount.Value.ToString()); + } + if (includeHostedAgentMinutesCount != null) + { + queryParams.Add("includeHostedAgentMinutesCount", includeHostedAgentMinutesCount.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 3), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateTaskHubLicenseDetailsAsync( + string hubName, + TaskHubLicenseDetails taskHubLicenseDetails, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("f9f0f436-b8a1-4475-9041-1ccdbf8f0128"); + object routeValues = new { hubName = hubName }; + HttpContent content = new ObjectContent(taskHubLicenseDetails, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 3), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetTaskIconAsync( + Guid taskId, + string versionString, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("63463108-174d-49d4-b8cb-235eea42a5e1"); + object routeValues = new { taskId = taskId, versionString = versionString }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task ValidateInputsAsync( + InputValidationRequest inputValidationRequest, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("58475b1e-adaf-4155-9bc1-e04bf1fff4c2"); + HttpContent content = new ObjectContent(inputValidationRequest, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteAgentRequestAsync( + int poolId, + long requestId, + Guid lockToken, + TaskResult? 
result = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("fc825784-c92a-4299-9221-998a02d1b54f"); + object routeValues = new { poolId = poolId, requestId = requestId }; + + List> queryParams = new List>(); + queryParams.Add("lockToken", lockToken.ToString()); + if (result != null) + { + queryParams.Add("result", result.Value.ToString()); + } + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetAgentRequestAsync( + int poolId, + long requestId, + bool? includeStatus = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("fc825784-c92a-4299-9221-998a02d1b54f"); + object routeValues = new { poolId = poolId, requestId = requestId }; + + List> queryParams = new List>(); + if (includeStatus != null) + { + queryParams.Add("includeStatus", includeStatus.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsAsync( + int poolId, + int top, + string continuationToken = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("fc825784-c92a-4299-9221-998a02d1b54f"); + object routeValues = new { poolId = poolId }; + + List> queryParams = new List>(); + queryParams.Add("$top", top.ToString(CultureInfo.InvariantCulture)); + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsForAgentAsync( + int poolId, + int agentId, + int? 
completedRequestCount = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("fc825784-c92a-4299-9221-998a02d1b54f"); + object routeValues = new { poolId = poolId }; + + List> queryParams = new List>(); + queryParams.Add("agentId", agentId.ToString(CultureInfo.InvariantCulture)); + if (completedRequestCount != null) + { + queryParams.Add("completedRequestCount", completedRequestCount.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsForAgentsAsync( + int poolId, + IEnumerable agentIds = null, + int? completedRequestCount = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("fc825784-c92a-4299-9221-998a02d1b54f"); + object routeValues = new { poolId = poolId }; + + List> queryParams = new List>(); + if (agentIds != null && agentIds.Any()) + { + queryParams.Add("agentIds", string.Join(",", agentIds)); + } + if (completedRequestCount != null) + { + queryParams.Add("completedRequestCount", completedRequestCount.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentRequestsForPlanAsync( + int poolId, + Guid planId, + Guid? jobId = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("fc825784-c92a-4299-9221-998a02d1b54f"); + object routeValues = new { poolId = poolId }; + + List> queryParams = new List>(); + queryParams.Add("planId", planId.ToString()); + if (jobId != null) + { + queryParams.Add("jobId", jobId.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
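// Illustrative sketch, not part of the generated client: the three pool-scoped queries above
// filter job requests by a single agent, by a set of agents, or by an orchestration plan.
// Assuming the same "client" instance and a plan id "myPlanId" (both assumptions), checking
// whether a plan still has an unfinished request in pool 1 could look like this (FinishTime
// being null for in-flight requests is an assumption about the TaskAgentJobRequest contract):
//
//     var planRequests = await client.GetAgentRequestsForPlanAsync(poolId: 1, planId: myPlanId);
//     bool stillRunning = planRequests.Any(r => r.FinishTime == null);   // requires using System.Linq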
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task QueueAgentRequestByPoolAsync( + int poolId, + TaskAgentJobRequest request, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("fc825784-c92a-4299-9221-998a02d1b54f"); + object routeValues = new { poolId = poolId }; + HttpContent content = new ObjectContent(request, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateAgentRequestAsync( + int poolId, + long requestId, + Guid lockToken, + TaskAgentJobRequest request, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("fc825784-c92a-4299-9221-998a02d1b54f"); + object routeValues = new { poolId = poolId, requestId = requestId }; + HttpContent content = new ObjectContent(request, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + queryParams.Add("lockToken", lockToken.ToString()); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task AddKubernetesResourceAsync( + string project, + int environmentId, + KubernetesResourceCreateParameters createParameters, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("73fba52f-15ab-42b3-a538-ce67a9223a04"); + object routeValues = new { project = project, environmentId = environmentId }; + HttpContent content = new ObjectContent(createParameters, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. 
+        [EditorBrowsable(EditorBrowsableState.Never)]
+        public virtual Task<KubernetesResource> AddKubernetesResourceAsync(
+            Guid project,
+            int environmentId,
+            KubernetesResourceCreateParameters createParameters,
+            object userState = null,
+            CancellationToken cancellationToken = default)
+        {
+            HttpMethod httpMethod = new HttpMethod("POST");
+            Guid locationId = new Guid("73fba52f-15ab-42b3-a538-ce67a9223a04");
+            object routeValues = new { project = project, environmentId = environmentId };
+            HttpContent content = new ObjectContent<KubernetesResourceCreateParameters>(createParameters, new VssJsonMediaTypeFormatter(true));
+
+            return SendAsync<KubernetesResource>(
+                httpMethod,
+                locationId,
+                routeValues: routeValues,
+                version: new ApiResourceVersion(5.1, 1),
+                userState: userState,
+                cancellationToken: cancellationToken,
+                content: content);
+        }
+
+        /// <summary>
+        /// [Preview API]
+        /// </summary>
+        /// <param name="project">Project ID or project name</param>
+        /// <param name="environmentId"></param>
+        /// <param name="resourceId"></param>
+        /// <param name="userState"></param>
+        /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
+        [EditorBrowsable(EditorBrowsableState.Never)]
+        public virtual async Task DeleteKubernetesResourceAsync(
+            string project,
+            int environmentId,
+            int resourceId,
+            object userState = null,
+            CancellationToken cancellationToken = default)
+        {
+            HttpMethod httpMethod = new HttpMethod("DELETE");
+            Guid locationId = new Guid("73fba52f-15ab-42b3-a538-ce67a9223a04");
+            object routeValues = new { project = project, environmentId = environmentId, resourceId = resourceId };
+
+            using (HttpResponseMessage response = await SendAsync(
+                httpMethod,
+                locationId,
+                routeValues: routeValues,
+                version: new ApiResourceVersion(5.1, 1),
+                userState: userState,
+                cancellationToken: cancellationToken).ConfigureAwait(false))
+            {
+                return;
+            }
+        }
+
+        /// <summary>
+        /// [Preview API]
+        /// </summary>
+        /// <param name="project">Project ID</param>
+        /// <param name="environmentId"></param>
+        /// <param name="resourceId"></param>
+        /// <param name="userState"></param>
+        /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
+        [EditorBrowsable(EditorBrowsableState.Never)]
+        public virtual async Task DeleteKubernetesResourceAsync(
+            Guid project,
+            int environmentId,
+            int resourceId,
+            object userState = null,
+            CancellationToken cancellationToken = default)
+        {
+            HttpMethod httpMethod = new HttpMethod("DELETE");
+            Guid locationId = new Guid("73fba52f-15ab-42b3-a538-ce67a9223a04");
+            object routeValues = new { project = project, environmentId = environmentId, resourceId = resourceId };
+
+            using (HttpResponseMessage response = await SendAsync(
+                httpMethod,
+                locationId,
+                routeValues: routeValues,
+                version: new ApiResourceVersion(5.1, 1),
+                userState: userState,
+                cancellationToken: cancellationToken).ConfigureAwait(false))
+            {
+                return;
+            }
+        }
+
+        /// <summary>
+        /// [Preview API]
+        /// </summary>
+        /// <param name="project">Project ID or project name</param>
+        /// <param name="environmentId"></param>
+        /// <param name="resourceId"></param>
+        /// <param name="userState"></param>
+        /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
+        [EditorBrowsable(EditorBrowsableState.Never)]
+        public virtual Task<KubernetesResource> GetKubernetesResourceAsync(
+            string project,
+            int environmentId,
+            int resourceId,
+            object userState = null,
+            CancellationToken cancellationToken = default)
+        {
+            HttpMethod httpMethod = new HttpMethod("GET");
+            Guid locationId = new Guid("73fba52f-15ab-42b3-a538-ce67a9223a04");
+            object routeValues = new { project = project, environmentId = environmentId, resourceId = resourceId };
+
+            return SendAsync<KubernetesResource>(
+                httpMethod,
+                locationId,
+                routeValues: routeValues,
+                version: new ApiResourceVersion(5.1, 1),
+                userState: userState,
+                cancellationToken: cancellationToken);
+        }
+
+        /// <summary>
+        /// [Preview API]
+        /// </summary>
+        /// <param name="project">Project ID</param>
+        /// <param name="environmentId"></param>
+        /// <param name="resourceId"></param>
+        /// <param name="userState"></param>
+        /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetKubernetesResourceAsync( + Guid project, + int environmentId, + int resourceId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("73fba52f-15ab-42b3-a538-ce67a9223a04"); + object routeValues = new { project = project, environmentId = environmentId, resourceId = resourceId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GenerateDeploymentMachineGroupAccessTokenAsync( + string project, + int machineGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("f8c7c0de-ac0d-469b-9cb1-c21f72d67693"); + object routeValues = new { project = project, machineGroupId = machineGroupId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GenerateDeploymentMachineGroupAccessTokenAsync( + Guid project, + int machineGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("f8c7c0de-ac0d-469b-9cb1-c21f72d67693"); + object routeValues = new { project = project, machineGroupId = machineGroupId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task AddDeploymentMachineGroupAsync( + string project, + DeploymentMachineGroup machineGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("d4adf50f-80c6-4ac8-9ca1-6e4e544286e9"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(machineGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// The cancellation token to cancel operation. 
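// Illustrative sketch, not part of the generated client: the machine-group token endpoint above
// mirrors the deployment-pool token endpoint documented earlier (a token used to configure and
// remove agents). Assuming the same "client" instance, that the token comes back as a string,
// and that DeploymentMachineGroup exposes Name and Id (all assumptions about the contract),
// registering a group and minting a token for its agents could look like:
//
//     var group = await client.AddDeploymentMachineGroupAsync(
//         "MyProject", new DeploymentMachineGroup { Name = "web-servers" });
//     string registrationToken = await client.GenerateDeploymentMachineGroupAccessTokenAsync(
//         "MyProject", group.Id);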
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task AddDeploymentMachineGroupAsync( + Guid project, + DeploymentMachineGroup machineGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("d4adf50f-80c6-4ac8-9ca1-6e4e544286e9"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(machineGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteDeploymentMachineGroupAsync( + string project, + int machineGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("d4adf50f-80c6-4ac8-9ca1-6e4e544286e9"); + object routeValues = new { project = project, machineGroupId = machineGroupId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteDeploymentMachineGroupAsync( + Guid project, + int machineGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("d4adf50f-80c6-4ac8-9ca1-6e4e544286e9"); + object routeValues = new { project = project, machineGroupId = machineGroupId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetDeploymentMachineGroupAsync( + string project, + int machineGroupId, + MachineGroupActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d4adf50f-80c6-4ac8-9ca1-6e4e544286e9"); + object routeValues = new { project = project, machineGroupId = machineGroupId }; + + List> queryParams = new List>(); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetDeploymentMachineGroupAsync( + Guid project, + int machineGroupId, + MachineGroupActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d4adf50f-80c6-4ac8-9ca1-6e4e544286e9"); + object routeValues = new { project = project, machineGroupId = machineGroupId }; + + List> queryParams = new List>(); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetDeploymentMachineGroupsAsync( + string project, + string machineGroupName = null, + MachineGroupActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d4adf50f-80c6-4ac8-9ca1-6e4e544286e9"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (machineGroupName != null) + { + queryParams.Add("machineGroupName", machineGroupName); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetDeploymentMachineGroupsAsync( + Guid project, + string machineGroupName = null, + MachineGroupActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("d4adf50f-80c6-4ac8-9ca1-6e4e544286e9"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (machineGroupName != null) + { + queryParams.Add("machineGroupName", machineGroupName); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateDeploymentMachineGroupAsync( + string project, + int machineGroupId, + DeploymentMachineGroup machineGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("d4adf50f-80c6-4ac8-9ca1-6e4e544286e9"); + object routeValues = new { project = project, machineGroupId = machineGroupId }; + HttpContent content = new ObjectContent(machineGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateDeploymentMachineGroupAsync( + Guid project, + int machineGroupId, + DeploymentMachineGroup machineGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("d4adf50f-80c6-4ac8-9ca1-6e4e544286e9"); + object routeValues = new { project = project, machineGroupId = machineGroupId }; + HttpContent content = new ObjectContent(machineGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetDeploymentMachineGroupMachinesAsync( + string project, + int machineGroupId, + IEnumerable tagFilters = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("966c3874-c347-4b18-a90c-d509116717fd"); + object routeValues = new { project = project, machineGroupId = machineGroupId }; + + List> queryParams = new List>(); + if (tagFilters != null && tagFilters.Any()) + { + queryParams.Add("tagFilters", string.Join(",", tagFilters)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetDeploymentMachineGroupMachinesAsync( + Guid project, + int machineGroupId, + IEnumerable tagFilters = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("966c3874-c347-4b18-a90c-d509116717fd"); + object routeValues = new { project = project, machineGroupId = machineGroupId }; + + List> queryParams = new List>(); + if (tagFilters != null && tagFilters.Any()) + { + queryParams.Add("tagFilters", string.Join(",", tagFilters)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> UpdateDeploymentMachineGroupMachinesAsync( + string project, + int machineGroupId, + IEnumerable deploymentMachines, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("966c3874-c347-4b18-a90c-d509116717fd"); + object routeValues = new { project = project, machineGroupId = machineGroupId }; + HttpContent content = new ObjectContent>(deploymentMachines, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> UpdateDeploymentMachineGroupMachinesAsync( + Guid project, + int machineGroupId, + IEnumerable deploymentMachines, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("966c3874-c347-4b18-a90c-d509116717fd"); + object routeValues = new { project = project, machineGroupId = machineGroupId }; + HttpContent content = new ObjectContent>(deploymentMachines, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. 
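// Illustrative sketch, not part of the generated client: tagFilters above is serialized as a
// single comma-separated query value, so the caller just passes the tags it wants to match.
// With the same assumed "client" instance:
//
//     var webMachines = await client.GetDeploymentMachineGroupMachinesAsync(
//         "MyProject", machineGroupId: 5, tagFilters: new[] { "web", "production" });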
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task AddDeploymentMachineAsync( + string project, + int deploymentGroupId, + DeploymentMachine machine, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + HttpContent content = new ObjectContent(machine, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task AddDeploymentMachineAsync( + Guid project, + int deploymentGroupId, + DeploymentMachine machine, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + HttpContent content = new ObjectContent(machine, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteDeploymentMachineAsync( + string project, + int deploymentGroupId, + int machineId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, machineId = machineId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteDeploymentMachineAsync( + Guid project, + int deploymentGroupId, + int machineId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, machineId = machineId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetDeploymentMachineAsync( + string project, + int deploymentGroupId, + int machineId, + DeploymentMachineExpands? expand = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, machineId = machineId }; + + List> queryParams = new List>(); + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetDeploymentMachineAsync( + Guid project, + int deploymentGroupId, + int machineId, + DeploymentMachineExpands? expand = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, machineId = machineId }; + + List> queryParams = new List>(); + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetDeploymentMachinesAsync( + string project, + int deploymentGroupId, + IEnumerable tags = null, + string name = null, + DeploymentMachineExpands? expand = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + if (tags != null && tags.Any()) + { + queryParams.Add("tags", string.Join(",", tags)); + } + if (name != null) + { + queryParams.Add("name", name); + } + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetDeploymentMachinesAsync( + Guid project, + int deploymentGroupId, + IEnumerable tags = null, + string name = null, + DeploymentMachineExpands? 
expand = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + if (tags != null && tags.Any()) + { + queryParams.Add("tags", string.Join(",", tags)); + } + if (name != null) + { + queryParams.Add("name", name); + } + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task ReplaceDeploymentMachineAsync( + string project, + int deploymentGroupId, + int machineId, + DeploymentMachine machine, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, machineId = machineId }; + HttpContent content = new ObjectContent(machine, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task ReplaceDeploymentMachineAsync( + Guid project, + int deploymentGroupId, + int machineId, + DeploymentMachine machine, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, machineId = machineId }; + HttpContent content = new ObjectContent(machine, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateDeploymentMachineAsync( + string project, + int deploymentGroupId, + int machineId, + DeploymentMachine machine, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, machineId = machineId }; + HttpContent content = new ObjectContent(machine, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateDeploymentMachineAsync( + Guid project, + int deploymentGroupId, + int machineId, + DeploymentMachine machine, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, machineId = machineId }; + HttpContent content = new ObjectContent(machine, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> UpdateDeploymentMachinesAsync( + string project, + int deploymentGroupId, + IEnumerable machines, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + HttpContent content = new ObjectContent>(machines, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> UpdateDeploymentMachinesAsync( + Guid project, + int deploymentGroupId, + IEnumerable machines, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("6f6d406f-cfe6-409c-9327-7009928077e7"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + HttpContent content = new ObjectContent>(machines, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task CreateAgentPoolMaintenanceDefinitionAsync( + int poolId, + TaskAgentPoolMaintenanceDefinition definition, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("80572e16-58f0-4419-ac07-d19fde32195c"); + object routeValues = new { poolId = poolId }; + HttpContent content = new ObjectContent(definition, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteAgentPoolMaintenanceDefinitionAsync( + int poolId, + int definitionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("80572e16-58f0-4419-ac07-d19fde32195c"); + object routeValues = new { poolId = poolId, definitionId = definitionId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetAgentPoolMaintenanceDefinitionAsync( + int poolId, + int definitionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("80572e16-58f0-4419-ac07-d19fde32195c"); + object routeValues = new { poolId = poolId, definitionId = definitionId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// The cancellation token to cancel operation. 
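// --- Illustrative usage sketch (editorial example, not part of this patch) ---
// The maintenance-definition methods above all route to location
// 80572e16-58f0-4419-ac07-d19fde32195c with poolId / definitionId route values.
// The client type name, the returned definition's Id property, and the (empty)
// TaskAgentPoolMaintenanceDefinition initialization are assumptions, since the
// model's members are not shown in this hunk.
private static async Task MaintenanceDefinitionExampleAsync(TaskAgentHttpClient client, int poolId)
{
    // POST a new definition for the pool, then GET it back by its id.
    var definition = new TaskAgentPoolMaintenanceDefinition();
    var created = await client.CreateAgentPoolMaintenanceDefinitionAsync(poolId, definition);
    var fetched = await client.GetAgentPoolMaintenanceDefinitionAsync(poolId, created.Id);
}
// --- end editorial example ---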
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentPoolMaintenanceDefinitionsAsync( + int poolId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("80572e16-58f0-4419-ac07-d19fde32195c"); + object routeValues = new { poolId = poolId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateAgentPoolMaintenanceDefinitionAsync( + int poolId, + int definitionId, + TaskAgentPoolMaintenanceDefinition definition, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("80572e16-58f0-4419-ac07-d19fde32195c"); + object routeValues = new { poolId = poolId, definitionId = definitionId }; + HttpContent content = new ObjectContent(definition, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteAgentPoolMaintenanceJobAsync( + int poolId, + int jobId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("15e7ab6e-abce-4601-a6d8-e111fe148f46"); + object routeValues = new { poolId = poolId, jobId = jobId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetAgentPoolMaintenanceJobAsync( + int poolId, + int jobId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("15e7ab6e-abce-4601-a6d8-e111fe148f46"); + object routeValues = new { poolId = poolId, jobId = jobId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task GetAgentPoolMaintenanceJobLogsAsync( + int poolId, + int jobId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("15e7ab6e-abce-4601-a6d8-e111fe148f46"); + object routeValues = new { poolId = poolId, jobId = jobId }; + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.1-preview.1"), + mediaType: "application/zip", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetAgentPoolMaintenanceJobsAsync( + int poolId, + int? definitionId = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("15e7ab6e-abce-4601-a6d8-e111fe148f46"); + object routeValues = new { poolId = poolId }; + + List> queryParams = new List>(); + if (definitionId != null) + { + queryParams.Add("definitionId", definitionId.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task QueueAgentPoolMaintenanceJobAsync( + int poolId, + TaskAgentPoolMaintenanceJob job, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("15e7ab6e-abce-4601-a6d8-e111fe148f46"); + object routeValues = new { poolId = poolId }; + HttpContent content = new ObjectContent(job, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
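// --- Illustrative usage sketch (editorial example, not part of this patch) ---
// GetAgentPoolMaintenanceJobLogsAsync above requests "application/zip" and, when
// the response carries Content-Encoding: gzip, wraps the body in a decompressing
// GZipStream before returning it, so callers always receive plain content. The
// client type name and the Task<Stream> return type (stripped in this capture)
// are assumptions.
private static async Task SaveMaintenanceLogsExampleAsync(TaskAgentHttpClient client, int poolId, int jobId)
{
    using (Stream logs = await client.GetAgentPoolMaintenanceJobLogsAsync(poolId, jobId))
    using (FileStream file = File.Create("maintenance-logs.zip"))
    {
        // Copy the (already decompressed, zip-packaged) log archive to disk.
        await logs.CopyToAsync(file);
    }
}
// --- end editorial example ---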
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateAgentPoolMaintenanceJobAsync( + int poolId, + int jobId, + TaskAgentPoolMaintenanceJob job, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("15e7ab6e-abce-4601-a6d8-e111fe148f46"); + object routeValues = new { poolId = poolId, jobId = jobId }; + HttpContent content = new ObjectContent(job, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteMessageAsync( + int poolId, + long messageId, + Guid sessionId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("c3a054f6-7a8a-49c0-944e-3a8e5d7adfd7"); + object routeValues = new { poolId = poolId, messageId = messageId }; + + List> queryParams = new List>(); + queryParams.Add("sessionId", sessionId.ToString()); + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetMessageAsync( + int poolId, + Guid sessionId, + long? lastMessageId = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("c3a054f6-7a8a-49c0-944e-3a8e5d7adfd7"); + object routeValues = new { poolId = poolId }; + + List> queryParams = new List>(); + queryParams.Add("sessionId", sessionId.ToString()); + if (lastMessageId != null) + { + queryParams.Add("lastMessageId", lastMessageId.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
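// --- Illustrative usage sketch (editorial example, not part of this patch) ---
// GetMessageAsync long-polls the pool's message queue for a session, and
// DeleteMessageAsync acknowledges a message once it has been handled; this is the
// shape of loop the runner's listener is expected to drive. The client type name,
// the TaskAgentMessage return type, and its MessageId property are assumptions.
private static async Task PollOnceExampleAsync(
    TaskAgentHttpClient client, int poolId, Guid sessionId, long? lastMessageId)
{
    TaskAgentMessage message = await client.GetMessageAsync(poolId, sessionId, lastMessageId);
    if (message != null)
    {
        // ... dispatch the message to a worker, then acknowledge it ...
        await client.DeleteMessageAsync(poolId, message.MessageId, sessionId);
    }
}
// --- end editorial example ---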
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task RefreshAgentAsync( + int poolId, + int agentId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("c3a054f6-7a8a-49c0-944e-3a8e5d7adfd7"); + object routeValues = new { poolId = poolId }; + + List> queryParams = new List>(); + queryParams.Add("agentId", agentId.ToString(CultureInfo.InvariantCulture)); + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task RefreshAgentsAsync( + int poolId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("c3a054f6-7a8a-49c0-944e-3a8e5d7adfd7"); + object routeValues = new { poolId = poolId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task SendMessageAsync( + int poolId, + long requestId, + TaskAgentMessage message, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("c3a054f6-7a8a-49c0-944e-3a8e5d7adfd7"); + object routeValues = new { poolId = poolId }; + HttpContent content = new ObjectContent(message, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + queryParams.Add("requestId", requestId.ToString(CultureInfo.InvariantCulture)); + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetPackageAsync( + string packageType, + string platform, + string version, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("8ffcd551-079c-493a-9c02-54346299d144"); + object routeValues = new { packageType = packageType, platform = platform, version = version }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 2), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetPackagesAsync( + string packageType, + string platform = null, + int? 
top = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("8ffcd551-079c-493a-9c02-54346299d144"); + object routeValues = new { packageType = packageType, platform = platform }; + + List> queryParams = new List>(); + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task GetAgentPoolMetadataAsync( + int poolId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("0d62f887-9f53-48b9-9161-4c35d5735b0f"); + object routeValues = new { poolId = poolId }; + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.1-preview.1"), + mediaType: "text/plain", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Create an agent pool. + /// + /// Details about the new agent pool + /// + /// The cancellation token to cancel operation. + public virtual Task AddAgentPoolAsync( + TaskAgentPool pool, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("a8c47e17-4d56-4a56-92bb-de7ea7dc65be"); + HttpContent content = new ObjectContent(pool, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Delete an agent pool. + /// + /// ID of the agent pool to delete + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteAgentPoolAsync( + int poolId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("a8c47e17-4d56-4a56-92bb-de7ea7dc65be"); + object routeValues = new { poolId = poolId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Get information about an agent pool. 
+ /// + /// An agent pool ID + /// Agent pool properties (comma-separated) + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task GetAgentPoolAsync( + int poolId, + IEnumerable properties = null, + TaskAgentPoolActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("a8c47e17-4d56-4a56-92bb-de7ea7dc65be"); + object routeValues = new { poolId = poolId }; + + List> queryParams = new List>(); + if (properties != null && properties.Any()) + { + queryParams.Add("properties", string.Join(",", properties)); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agent pools. + /// + /// Filter by name + /// Filter by agent pool properties (comma-separated) + /// Filter by pool type + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentPoolsAsync( + string poolName = null, + IEnumerable properties = null, + TaskAgentPoolType? poolType = null, + TaskAgentPoolActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("a8c47e17-4d56-4a56-92bb-de7ea7dc65be"); + + List> queryParams = new List>(); + if (poolName != null) + { + queryParams.Add("poolName", poolName); + } + if (properties != null && properties.Any()) + { + queryParams.Add("properties", string.Join(",", properties)); + } + if (poolType != null) + { + queryParams.Add("poolType", poolType.Value.ToString()); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agent pools. + /// + /// pool Ids to fetch + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentPoolsByIdsAsync( + IEnumerable poolIds, + TaskAgentPoolActionFilter? 
actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("a8c47e17-4d56-4a56-92bb-de7ea7dc65be"); + + List> queryParams = new List>(); + string poolIdsAsString = null; + if (poolIds != null) + { + poolIdsAsString = string.Join(",", poolIds); + } + queryParams.Add("poolIds", poolIdsAsString); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Update properties on an agent pool + /// + /// The agent pool to update + /// Updated agent pool details + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateAgentPoolAsync( + int poolId, + TaskAgentPool pool, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("a8c47e17-4d56-4a56-92bb-de7ea7dc65be"); + object routeValues = new { poolId = poolId }; + HttpContent content = new ObjectContent(pool, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Create a new agent queue to connect a project to an agent pool. + /// + /// Details about the queue to create + /// Automatically authorize this queue when using YAML + /// + /// The cancellation token to cancel operation. + public virtual Task AddAgentQueueAsync( + TaskAgentQueue queue, + bool? authorizePipelines = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + HttpContent content = new ObjectContent(queue, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (authorizePipelines != null) + { + queryParams.Add("authorizePipelines", authorizePipelines.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Create a new agent queue to connect a project to an agent pool. + /// + /// Project ID or project name + /// Details about the queue to create + /// Automatically authorize this queue when using YAML + /// + /// The cancellation token to cancel operation. + public virtual Task AddAgentQueueAsync( + string project, + TaskAgentQueue queue, + bool? 
authorizePipelines = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(queue, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (authorizePipelines != null) + { + queryParams.Add("authorizePipelines", authorizePipelines.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Create a new agent queue to connect a project to an agent pool. + /// + /// Project ID + /// Details about the queue to create + /// Automatically authorize this queue when using YAML + /// + /// The cancellation token to cancel operation. + public virtual Task AddAgentQueueAsync( + Guid project, + TaskAgentQueue queue, + bool? authorizePipelines = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(queue, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (authorizePipelines != null) + { + queryParams.Add("authorizePipelines", authorizePipelines.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Create a new team project. + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task CreateTeamProjectAsync( + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Create a new team project. + /// + /// Project ID or project name + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task CreateTeamProjectAsync( + string project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Create a new team project. + /// + /// Project ID + /// + /// The cancellation token to cancel operation. 
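// --- Illustrative usage sketch (editorial example, not part of this patch) ---
// An agent queue links a project to a pool; the AddAgentQueueAsync overloads above
// differ only in how the project is addressed (none / name / Guid), and the
// authorizePipelines flag is forwarded as a query parameter. The client type name
// and the TaskAgentQueue.Name property are assumptions; the pool-association
// property is omitted because it is not shown in this hunk.
private static async Task AddQueueExampleAsync(TaskAgentHttpClient client, string project)
{
    var queue = new TaskAgentQueue { Name = "Default" };
    var created = await client.AddAgentQueueAsync(project, queue, authorizePipelines: true);
}
// --- end editorial example ---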
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task CreateTeamProjectAsync( + Guid project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Removes an agent queue from a project. + /// + /// The agent queue to remove + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteAgentQueueAsync( + int queueId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { queueId = queueId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Removes an agent queue from a project. + /// + /// Project ID or project name + /// The agent queue to remove + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteAgentQueueAsync( + string project, + int queueId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project, queueId = queueId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Removes an agent queue from a project. + /// + /// Project ID + /// The agent queue to remove + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteAgentQueueAsync( + Guid project, + int queueId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project, queueId = queueId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Get information about an agent queue. + /// + /// Project ID or project name + /// The agent queue to get information about + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task GetAgentQueueAsync( + string project, + int queueId, + TaskAgentQueueActionFilter? 
actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project, queueId = queueId }; + + List> queryParams = new List>(); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get information about an agent queue. + /// + /// Project ID + /// The agent queue to get information about + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task GetAgentQueueAsync( + Guid project, + int queueId, + TaskAgentQueueActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project, queueId = queueId }; + + List> queryParams = new List>(); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get information about an agent queue. + /// + /// The agent queue to get information about + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task GetAgentQueueAsync( + int queueId, + TaskAgentQueueActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { queueId = queueId }; + + List> queryParams = new List>(); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agent queues. + /// + /// Project ID or project name + /// Filter on the agent queue name + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentQueuesAsync( + string project, + string queueName = null, + TaskAgentQueueActionFilter? 
actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (queueName != null) + { + queryParams.Add("queueName", queueName); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agent queues. + /// + /// Project ID + /// Filter on the agent queue name + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentQueuesAsync( + Guid project, + string queueName = null, + TaskAgentQueueActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (queueName != null) + { + queryParams.Add("queueName", queueName); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agent queues. + /// + /// Filter on the agent queue name + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentQueuesAsync( + string queueName = null, + TaskAgentQueueActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + + List> queryParams = new List>(); + if (queueName != null) + { + queryParams.Add("queueName", queueName); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agent queues by their IDs + /// + /// Project ID or project name + /// A comma-separated list of agent queue IDs to retrieve + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentQueuesByIdsAsync( + string project, + IEnumerable queueIds, + TaskAgentQueueActionFilter? 
actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + string queueIdsAsString = null; + if (queueIds != null) + { + queueIdsAsString = string.Join(",", queueIds); + } + queryParams.Add("queueIds", queueIdsAsString); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agent queues by their IDs + /// + /// Project ID + /// A comma-separated list of agent queue IDs to retrieve + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentQueuesByIdsAsync( + Guid project, + IEnumerable queueIds, + TaskAgentQueueActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + string queueIdsAsString = null; + if (queueIds != null) + { + queueIdsAsString = string.Join(",", queueIds); + } + queryParams.Add("queueIds", queueIdsAsString); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agent queues by their IDs + /// + /// A comma-separated list of agent queue IDs to retrieve + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentQueuesByIdsAsync( + IEnumerable queueIds, + TaskAgentQueueActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + + List> queryParams = new List>(); + string queueIdsAsString = null; + if (queueIds != null) + { + queueIdsAsString = string.Join(",", queueIds); + } + queryParams.Add("queueIds", queueIdsAsString); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agent queues by their names + /// + /// Project ID or project name + /// A comma-separated list of agent names to retrieve + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentQueuesByNamesAsync( + string project, + IEnumerable queueNames, + TaskAgentQueueActionFilter? 
actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + string queueNamesAsString = null; + if (queueNames != null) + { + queueNamesAsString = string.Join(",", queueNames); + } + queryParams.Add("queueNames", queueNamesAsString); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agent queues by their names + /// + /// Project ID + /// A comma-separated list of agent names to retrieve + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentQueuesByNamesAsync( + Guid project, + IEnumerable queueNames, + TaskAgentQueueActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + string queueNamesAsString = null; + if (queueNames != null) + { + queueNamesAsString = string.Join(",", queueNames); + } + queryParams.Add("queueNames", queueNamesAsString); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agent queues by their names + /// + /// A comma-separated list of agent names to retrieve + /// Filter by whether the calling user has use or manage permissions + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAgentQueuesByNamesAsync( + IEnumerable queueNames, + TaskAgentQueueActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("900fa995-c559-4923-aae7-f8424fe4fbea"); + + List> queryParams = new List>(); + string queueNamesAsString = null; + if (queueNames != null) + { + queueNamesAsString = string.Join(",", queueNames); + } + queryParams.Add("queueNames", queueNamesAsString); + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// The cancellation token to cancel operation. 
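// --- Illustrative usage sketch (editorial example, not part of this patch) ---
// Resolving a queue by display name uses the "queueName" query parameter, while the
// *ByIds / *ByNames overloads above join their inputs into a single comma-separated
// query value. The client type name, the list return type, and the
// TaskAgentQueueActionFilter.Use value ("use or manage permissions") are
// assumptions; requires using System.Linq.
private static async Task ResolveQueueExampleAsync(TaskAgentHttpClient client, string project)
{
    var queues = await client.GetAgentQueuesAsync(
        project,
        queueName: "Default",
        actionFilter: TaskAgentQueueActionFilter.Use); // only queues the caller may use
    var queue = queues.FirstOrDefault();
}
// --- end editorial example ---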
+ public virtual Task> GetAgentCloudRequestsAsync( + int agentCloudId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("20189bd7-5134-49c2-b8e9-f9e856eea2b2"); + object routeValues = new { agentCloudId = agentCloudId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetResourceLimitsAsync( + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("1f1f0557-c445-42a6-b4a0-0df605a3a0f8"); + + return SendAsync>( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetResourceUsageAsync( + string parallelismTag = null, + bool? poolIsHosted = null, + bool? includeRunningRequests = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("eae1d376-a8b1-4475-9041-1dfdbe8f0143"); + + List> queryParams = new List>(); + if (parallelismTag != null) + { + queryParams.Add("parallelismTag", parallelismTag); + } + if (poolIsHosted != null) + { + queryParams.Add("poolIsHosted", poolIsHosted.Value.ToString()); + } + if (includeRunningRequests != null) + { + queryParams.Add("includeRunningRequests", includeRunningRequests.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 2), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetTaskGroupHistoryAsync( + string project, + Guid taskGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("100cc92a-b255-47fa-9ab3-e44a2985a3ac"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// The cancellation token to cancel operation. 
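// --- Illustrative usage sketch (editorial example, not part of this patch) ---
// GetResourceUsageAsync above distinguishes hosted from self-hosted capacity via
// the optional "parallelismTag" and "poolIsHosted" query parameters, and (like
// GetPackagesAsync) targets API version 5.1-preview.2 rather than preview.1. The
// client type name and the "private" tag value are assumptions.
private static async Task ResourceUsageExampleAsync(TaskAgentHttpClient client)
{
    var selfHostedUsage = await client.GetResourceUsageAsync(
        parallelismTag: "private",
        poolIsHosted: false,
        includeRunningRequests: true);
}
// --- end editorial example ---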
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetTaskGroupHistoryAsync( + Guid project, + Guid taskGroupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("100cc92a-b255-47fa-9ab3-e44a2985a3ac"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Delete a secure file + /// + /// Project ID or project name + /// The unique secure file Id + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteSecureFileAsync( + string project, + Guid secureFileId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project, secureFileId = secureFileId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Delete a secure file + /// + /// Project ID + /// The unique secure file Id + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteSecureFileAsync( + Guid project, + Guid secureFileId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project, secureFileId = secureFileId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Download a secure file by Id + /// + /// Project ID or project name + /// The unique secure file Id + /// A valid download ticket + /// If download is true, the file is sent as attachement in the response body. If download is false, the response body contains the file stream. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DownloadSecureFileAsync( + string project, + Guid secureFileId, + string ticket, + bool? 
download = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project, secureFileId = secureFileId }; + + List> queryParams = new List>(); + queryParams.Add("ticket", ticket); + if (download != null) + { + queryParams.Add("download", download.Value.ToString()); + } + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.1-preview.1"), + queryParameters: queryParams, + mediaType: "application/octet-stream", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Download a secure file by Id + /// + /// Project ID + /// The unique secure file Id + /// A valid download ticket + /// If download is true, the file is sent as attachement in the response body. If download is false, the response body contains the file stream. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DownloadSecureFileAsync( + Guid project, + Guid secureFileId, + string ticket, + bool? download = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project, secureFileId = secureFileId }; + + List> queryParams = new List>(); + queryParams.Add("ticket", ticket); + if (download != null) + { + queryParams.Add("download", download.Value.ToString()); + } + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.1-preview.1"), + queryParameters: queryParams, + mediaType: "application/octet-stream", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] Get a secure file + /// + /// Project ID or project name + /// The unique secure file Id + /// If includeDownloadTicket is true and the caller has permissions, a download ticket is included in the response. + /// + /// + /// The cancellation token to cancel operation. 
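// --- Illustrative usage sketch (editorial example, not part of this patch) ---
// DownloadSecureFileAsync above requires a valid download ticket (see the
// includeDownloadTicket option on GetSecureFileAsync declared next) and, like the
// other streaming helpers in this client, unwraps gzip-encoded responses before
// handing the stream back. The client type name and the Task<Stream> return type
// (stripped in this capture) are assumptions.
private static async Task DownloadSecureFileExampleAsync(
    TaskAgentHttpClient client, string project, Guid secureFileId, string ticket)
{
    using (Stream content = await client.DownloadSecureFileAsync(project, secureFileId, ticket, download: true))
    using (FileStream file = File.Create("secure-file.bin"))
    {
        await content.CopyToAsync(file);
    }
}
// --- end editorial example ---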
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetSecureFileAsync( + string project, + Guid secureFileId, + bool? includeDownloadTicket = null, + SecureFileActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project, secureFileId = secureFileId }; + + List> queryParams = new List>(); + if (includeDownloadTicket != null) + { + queryParams.Add("includeDownloadTicket", includeDownloadTicket.Value.ToString()); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a secure file + /// + /// Project ID + /// The unique secure file Id + /// If includeDownloadTicket is true and the caller has permissions, a download ticket is included in the response. + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetSecureFileAsync( + Guid project, + Guid secureFileId, + bool? includeDownloadTicket = null, + SecureFileActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project, secureFileId = secureFileId }; + + List> queryParams = new List>(); + if (includeDownloadTicket != null) + { + queryParams.Add("includeDownloadTicket", includeDownloadTicket.Value.ToString()); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get secure files + /// + /// Project ID or project name + /// Name of the secure file to match. Can include wildcards to match multiple files. + /// If includeDownloadTickets is true and the caller has permissions, a download ticket for each secure file is included in the response. + /// Filter by secure file permissions for View, Manage or Use action. Defaults to View. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetSecureFilesAsync( + string project, + string namePattern = null, + bool? includeDownloadTickets = null, + SecureFileActionFilter? 
actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (namePattern != null) + { + queryParams.Add("namePattern", namePattern); + } + if (includeDownloadTickets != null) + { + queryParams.Add("includeDownloadTickets", includeDownloadTickets.Value.ToString()); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get secure files + /// + /// Project ID + /// Name of the secure file to match. Can include wildcards to match multiple files. + /// If includeDownloadTickets is true and the caller has permissions, a download ticket for each secure file is included in the response. + /// Filter by secure file permissions for View, Manage or Use action. Defaults to View. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetSecureFilesAsync( + Guid project, + string namePattern = null, + bool? includeDownloadTickets = null, + SecureFileActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (namePattern != null) + { + queryParams.Add("namePattern", namePattern); + } + if (includeDownloadTickets != null) + { + queryParams.Add("includeDownloadTickets", includeDownloadTickets.Value.ToString()); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get secure files + /// + /// Project ID or project name + /// A list of secure file Ids + /// If includeDownloadTickets is true and the caller has permissions, a download ticket for each secure file is included in the response. + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetSecureFilesByIdsAsync( + string project, + IEnumerable secureFileIds, + bool? includeDownloadTickets = null, + SecureFileActionFilter? 
actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + string secureFileIdsAsString = null; + if (secureFileIds != null) + { + secureFileIdsAsString = string.Join(",", secureFileIds); + } + queryParams.Add("secureFileIds", secureFileIdsAsString); + if (includeDownloadTickets != null) + { + queryParams.Add("includeDownloadTickets", includeDownloadTickets.Value.ToString()); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get secure files + /// + /// Project ID + /// A list of secure file Ids + /// If includeDownloadTickets is true and the caller has permissions, a download ticket for each secure file is included in the response. + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetSecureFilesByIdsAsync( + Guid project, + IEnumerable secureFileIds, + bool? includeDownloadTickets = null, + SecureFileActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + string secureFileIdsAsString = null; + if (secureFileIds != null) + { + secureFileIdsAsString = string.Join(",", secureFileIds); + } + queryParams.Add("secureFileIds", secureFileIdsAsString); + if (includeDownloadTickets != null) + { + queryParams.Add("includeDownloadTickets", includeDownloadTickets.Value.ToString()); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get secure files + /// + /// Project ID or project name + /// A list of secure file Ids + /// If includeDownloadTickets is true and the caller has permissions, a download ticket for each secure file is included in the response. + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetSecureFilesByNamesAsync( + string project, + IEnumerable secureFileNames, + bool? includeDownloadTickets = null, + SecureFileActionFilter? 
actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + string secureFileNamesAsString = null; + if (secureFileNames != null) + { + secureFileNamesAsString = string.Join(",", secureFileNames); + } + queryParams.Add("secureFileNames", secureFileNamesAsString); + if (includeDownloadTickets != null) + { + queryParams.Add("includeDownloadTickets", includeDownloadTickets.Value.ToString()); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get secure files + /// + /// Project ID + /// A list of secure file Ids + /// If includeDownloadTickets is true and the caller has permissions, a download ticket for each secure file is included in the response. + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetSecureFilesByNamesAsync( + Guid project, + IEnumerable secureFileNames, + bool? includeDownloadTickets = null, + SecureFileActionFilter? actionFilter = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + string secureFileNamesAsString = null; + if (secureFileNames != null) + { + secureFileNamesAsString = string.Join(",", secureFileNames); + } + queryParams.Add("secureFileNames", secureFileNamesAsString); + if (includeDownloadTickets != null) + { + queryParams.Add("includeDownloadTickets", includeDownloadTickets.Value.ToString()); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Query secure files using a name pattern and a condition on file properties. + /// + /// Project ID or project name + /// The main condition syntax is described [here](https://go.microsoft.com/fwlink/?linkid=842996). Use the *property('property-name')* function to access the value of the specified property of a secure file. It returns null if the property is not set. E.g. ``` and( eq( property('devices'), '2' ), in( property('provisioning profile type'), 'ad hoc', 'development' ) ) ``` + /// Name of the secure file to match. Can include wildcards to match multiple files. + /// + /// The cancellation token to cancel operation. 
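A sketch of the by-names lookup above with download tickets requested up front (the client type name and file names are assumptions for illustration):

    static Task<List<SecureFile>> GetSigningFilesAsync(TaskAgentHttpClientBase client, Guid project)
    {
        // Tickets returned here can be passed to DownloadSecureFileAsync later.
        return client.GetSecureFilesByNamesAsync(
            project,
            new[] { "ios-signing.p12", "distribution.mobileprovision" },
            includeDownloadTickets: true);
    }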
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> QuerySecureFilesByPropertiesAsync( + string project, + string condition, + string namePattern = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(condition, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (namePattern != null) + { + queryParams.Add("namePattern", namePattern); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Query secure files using a name pattern and a condition on file properties. + /// + /// Project ID + /// The main condition syntax is described [here](https://go.microsoft.com/fwlink/?linkid=842996). Use the *property('property-name')* function to access the value of the specified property of a secure file. It returns null if the property is not set. E.g. ``` and( eq( property('devices'), '2' ), in( property('provisioning profile type'), 'ad hoc', 'development' ) ) ``` + /// Name of the secure file to match. Can include wildcards to match multiple files. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> QuerySecureFilesByPropertiesAsync( + Guid project, + string condition, + string namePattern = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(condition, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (namePattern != null) + { + queryParams.Add("namePattern", namePattern); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update the name or properties of an existing secure file + /// + /// Project ID or project name + /// The unique secure file Id + /// The secure file with updated name and/or properties + /// + /// The cancellation token to cancel operation. 
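The property-condition syntax described above is easiest to read in a concrete call; a sketch with illustrative property names and the same assumed client type:

    static Task<List<SecureFile>> FindAdHocProfilesAsync(TaskAgentHttpClientBase client, Guid project)
    {
        // The condition travels in the request body; namePattern narrows the match by file name.
        string condition = "in( property('provisioning profile type'), 'ad hoc', 'development' )";
        return client.QuerySecureFilesByPropertiesAsync(project, condition, namePattern: "*.mobileprovision");
    }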
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateSecureFileAsync( + string project, + Guid secureFileId, + SecureFile secureFile, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project, secureFileId = secureFileId }; + HttpContent content = new ObjectContent(secureFile, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update the name or properties of an existing secure file + /// + /// Project ID + /// The unique secure file Id + /// The secure file with updated name and/or properties + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateSecureFileAsync( + Guid project, + Guid secureFileId, + SecureFile secureFile, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project, secureFileId = secureFileId }; + HttpContent content = new ObjectContent(secureFile, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update properties and/or names of a set of secure files. Files are identified by their IDs. Properties provided override the existing one entirely, i.e. do not merge. + /// + /// Project ID or project name + /// A list of secure file objects. Only three field must be populated Id, Name, and Properties. The rest of fields in the object are ignored. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> UpdateSecureFilesAsync( + string project, + IEnumerable secureFiles, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent>(secureFiles, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update properties and/or names of a set of secure files. Files are identified by their IDs. Properties provided override the existing one entirely, i.e. do not merge. + /// + /// Project ID + /// A list of secure file objects. Only three field must be populated Id, Name, and Properties. The rest of fields in the object are ignored. + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> UpdateSecureFilesAsync( + Guid project, + IEnumerable secureFiles, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent>(secureFiles, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Upload a secure file, include the file stream in the request body + /// + /// Project ID or project name + /// Stream to upload + /// Name of the file to upload + /// If authorizePipelines is true, then the secure file is authorized for use by all pipelines in the project. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UploadSecureFileAsync( + string project, + Stream uploadStream, + string name, + bool? authorizePipelines = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project }; + HttpContent content = new StreamContent(uploadStream); + content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); + + List> queryParams = new List>(); + queryParams.Add("name", name); + if (authorizePipelines != null) + { + queryParams.Add("authorizePipelines", authorizePipelines.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Upload a secure file, include the file stream in the request body + /// + /// Project ID + /// Stream to upload + /// Name of the file to upload + /// If authorizePipelines is true, then the secure file is authorized for use by all pipelines in the project. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UploadSecureFileAsync( + Guid project, + Stream uploadStream, + string name, + bool? authorizePipelines = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("adcfd8bc-b184-43ba-bd84-7c8c6a2ff421"); + object routeValues = new { project = project }; + HttpContent content = new StreamContent(uploadStream); + content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); + + List> queryParams = new List>(); + queryParams.Add("name", name); + if (authorizePipelines != null) + { + queryParams.Add("authorizePipelines", authorizePipelines.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
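A sketch of uploading a local file through the overloads above (assumed client type; the local path handling is illustrative):

    static async Task<SecureFile> UploadCertificateAsync(TaskAgentHttpClientBase client, Guid project, string localPath)
    {
        // The raw stream becomes the request body; the server-side name and pipeline authorization go in query parameters.
        using (FileStream source = File.OpenRead(localPath))
        {
            return await client.UploadSecureFileAsync(project, source, name: Path.GetFileName(localPath), authorizePipelines: true);
        }
    }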
+        [EditorBrowsable(EditorBrowsableState.Never)]
+        public virtual Task<TaskAgentSession> CreateAgentSessionAsync(
+            int poolId,
+            TaskAgentSession session,
+            object userState = null,
+            CancellationToken cancellationToken = default)
+        {
+            HttpMethod httpMethod = new HttpMethod("POST");
+            Guid locationId = new Guid("134e239e-2df3-4794-a6f6-24f1f19ec8dc");
+            object routeValues = new { poolId = poolId };
+            HttpContent content = new ObjectContent<TaskAgentSession>(session, new VssJsonMediaTypeFormatter(true));
+
+            return SendAsync<TaskAgentSession>(
+                httpMethod,
+                locationId,
+                routeValues: routeValues,
+                version: new ApiResourceVersion(5.1, 1),
+                userState: userState,
+                cancellationToken: cancellationToken,
+                content: content);
+        }
+
+        /// <summary>
+        /// [Preview API]
+        /// </summary>
+        /// <param name="poolId"></param>
+        /// <param name="sessionId"></param>
+        /// <param name="userState"></param>
+        /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
+        [EditorBrowsable(EditorBrowsableState.Never)]
+        public virtual async Task DeleteAgentSessionAsync(
+            int poolId,
+            Guid sessionId,
+            object userState = null,
+            CancellationToken cancellationToken = default)
+        {
+            HttpMethod httpMethod = new HttpMethod("DELETE");
+            Guid locationId = new Guid("134e239e-2df3-4794-a6f6-24f1f19ec8dc");
+            object routeValues = new { poolId = poolId, sessionId = sessionId };
+
+            using (HttpResponseMessage response = await SendAsync(
+                httpMethod,
+                locationId,
+                routeValues: routeValues,
+                version: new ApiResourceVersion(5.1, 1),
+                userState: userState,
+                cancellationToken: cancellationToken).ConfigureAwait(false))
+            {
+                return;
+            }
+        }
+
+        /// <summary>
+        /// [Preview API] Register a deployment target to a deployment group. Generally this is called by agent configuration tool.
+        /// </summary>
+        /// <param name="project">Project ID or project name</param>
+        /// <param name="deploymentGroupId">ID of the deployment group to which the deployment target is registered.</param>
+        /// <param name="machine">Deployment target to register.</param>
+        /// <param name="userState"></param>
+        /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
+        [EditorBrowsable(EditorBrowsableState.Never)]
+        public virtual Task<DeploymentMachine> AddDeploymentTargetAsync(
+            string project,
+            int deploymentGroupId,
+            DeploymentMachine machine,
+            object userState = null,
+            CancellationToken cancellationToken = default)
+        {
+            HttpMethod httpMethod = new HttpMethod("POST");
+            Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6");
+            object routeValues = new { project = project, deploymentGroupId = deploymentGroupId };
+            HttpContent content = new ObjectContent<DeploymentMachine>(machine, new VssJsonMediaTypeFormatter(true));
+
+            return SendAsync<DeploymentMachine>(
+                httpMethod,
+                locationId,
+                routeValues: routeValues,
+                version: new ApiResourceVersion(5.1, 1),
+                userState: userState,
+                cancellationToken: cancellationToken,
+                content: content);
+        }
+
+        /// <summary>
+        /// [Preview API] Register a deployment target to a deployment group. Generally this is called by agent configuration tool.
+        /// </summary>
+        /// <param name="project">Project ID</param>
+        /// <param name="deploymentGroupId">ID of the deployment group to which the deployment target is registered.</param>
+        /// <param name="machine">Deployment target to register.</param>
+        /// <param name="userState"></param>
+        /// <param name="cancellationToken">The cancellation token to cancel operation.</param>
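A sketch of the session lifecycle around the two calls above: create the session, hand it to the caller's work, and always delete it on the way out (client type name is assumed; SessionId is assumed to be the Guid the service assigns to the created session):

    static async Task WithAgentSessionAsync(TaskAgentHttpClientBase client, int poolId, TaskAgentSession session, Func<TaskAgentSession, Task> work)
    {
        TaskAgentSession created = await client.CreateAgentSessionAsync(poolId, session);
        try
        {
            await work(created);
        }
        finally
        {
            // Tear the session down even if the work fails, so the pool slot is released.
            await client.DeleteAgentSessionAsync(poolId, created.SessionId);
        }
    }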
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task AddDeploymentTargetAsync( + Guid project, + int deploymentGroupId, + DeploymentMachine machine, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + HttpContent content = new ObjectContent(machine, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Delete a deployment target in a deployment group. This deletes the agent from associated deployment pool too. + /// + /// Project ID or project name + /// ID of the deployment group in which deployment target is deleted. + /// ID of the deployment target to delete. + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteDeploymentTargetAsync( + string project, + int deploymentGroupId, + int targetId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, targetId = targetId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Delete a deployment target in a deployment group. This deletes the agent from associated deployment pool too. + /// + /// Project ID + /// ID of the deployment group in which deployment target is deleted. + /// ID of the deployment target to delete. + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteDeploymentTargetAsync( + Guid project, + int deploymentGroupId, + int targetId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, targetId = targetId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Get a deployment target by its ID in a deployment group + /// + /// Project ID or project name + /// ID of the deployment group to which deployment target belongs. + /// ID of the deployment target to return. + /// Include these additional details in the returned objects. + /// + /// The cancellation token to cancel operation. + public virtual Task GetDeploymentTargetAsync( + string project, + int deploymentGroupId, + int targetId, + DeploymentTargetExpands? 
expand = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, targetId = targetId }; + + List> queryParams = new List>(); + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a deployment target by its ID in a deployment group + /// + /// Project ID + /// ID of the deployment group to which deployment target belongs. + /// ID of the deployment target to return. + /// Include these additional details in the returned objects. + /// + /// The cancellation token to cancel operation. + public virtual Task GetDeploymentTargetAsync( + Guid project, + int deploymentGroupId, + int targetId, + DeploymentTargetExpands? expand = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, targetId = targetId }; + + List> queryParams = new List>(); + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of deployment targets in a deployment group. + /// + /// Project ID or project name + /// ID of the deployment group. + /// Get only the deployment targets that contain all these comma separted list of tags. + /// Name pattern of the deployment targets to return. + /// When set to true, treats **name** as pattern. Else treats it as absolute match. Default is **false**. + /// Include these additional details in the returned objects. + /// Get only deployment targets that have this status. + /// Get only deployment targets that have this last job result. + /// Get deployment targets with names greater than this continuationToken lexicographically. + /// Maximum number of deployment targets to return. Default is **1000**. + /// Get only deployment targets that are enabled or disabled. Default is 'null' which returns all the targets. + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDeploymentTargetsAsync( + string project, + int deploymentGroupId, + IEnumerable tags = null, + string name = null, + bool? partialNameMatch = null, + DeploymentTargetExpands? expand = null, + TaskAgentStatusFilter? agentStatus = null, + TaskAgentJobResultFilter? agentJobResult = null, + string continuationToken = null, + int? top = null, + bool? 
enabled = null, + IEnumerable propertyFilters = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + if (tags != null && tags.Any()) + { + queryParams.Add("tags", string.Join(",", tags)); + } + if (name != null) + { + queryParams.Add("name", name); + } + if (partialNameMatch != null) + { + queryParams.Add("partialNameMatch", partialNameMatch.Value.ToString()); + } + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + if (agentStatus != null) + { + queryParams.Add("agentStatus", agentStatus.Value.ToString()); + } + if (agentJobResult != null) + { + queryParams.Add("agentJobResult", agentJobResult.Value.ToString()); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (enabled != null) + { + queryParams.Add("enabled", enabled.Value.ToString()); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of deployment targets in a deployment group. + /// + /// Project ID + /// ID of the deployment group. + /// Get only the deployment targets that contain all these comma separted list of tags. + /// Name pattern of the deployment targets to return. + /// When set to true, treats **name** as pattern. Else treats it as absolute match. Default is **false**. + /// Include these additional details in the returned objects. + /// Get only deployment targets that have this status. + /// Get only deployment targets that have this last job result. + /// Get deployment targets with names greater than this continuationToken lexicographically. + /// Maximum number of deployment targets to return. Default is **1000**. + /// Get only deployment targets that are enabled or disabled. Default is 'null' which returns all the targets. + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDeploymentTargetsAsync( + Guid project, + int deploymentGroupId, + IEnumerable tags = null, + string name = null, + bool? partialNameMatch = null, + DeploymentTargetExpands? expand = null, + TaskAgentStatusFilter? agentStatus = null, + TaskAgentJobResultFilter? agentJobResult = null, + string continuationToken = null, + int? top = null, + bool? 
enabled = null, + IEnumerable propertyFilters = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + if (tags != null && tags.Any()) + { + queryParams.Add("tags", string.Join(",", tags)); + } + if (name != null) + { + queryParams.Add("name", name); + } + if (partialNameMatch != null) + { + queryParams.Add("partialNameMatch", partialNameMatch.Value.ToString()); + } + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + if (agentStatus != null) + { + queryParams.Add("agentStatus", agentStatus.Value.ToString()); + } + if (agentJobResult != null) + { + queryParams.Add("agentJobResult", agentJobResult.Value.ToString()); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (enabled != null) + { + queryParams.Add("enabled", enabled.Value.ToString()); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Replace a deployment target in a deployment group. Generally this is called by agent configuration tool. + /// + /// Project ID or project name + /// ID of the deployment group in which deployment target is replaced. + /// ID of the deployment target to replace. + /// New deployment target. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task ReplaceDeploymentTargetAsync( + string project, + int deploymentGroupId, + int targetId, + DeploymentMachine machine, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, targetId = targetId }; + HttpContent content = new ObjectContent(machine, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Replace a deployment target in a deployment group. Generally this is called by agent configuration tool. + /// + /// Project ID + /// ID of the deployment group in which deployment target is replaced. + /// ID of the deployment target to replace. + /// New deployment target. + /// + /// The cancellation token to cancel operation. 
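A sketch of the filtered listing above: only online targets carrying both tags, capped at 100 results (the client type, the tag values, and the TaskAgentStatusFilter member name are assumptions):

    static Task<List<DeploymentMachine>> GetOnlineWebTargetsAsync(TaskAgentHttpClientBase client, Guid project, int deploymentGroupId)
    {
        // Page through larger groups by passing the continuationToken from the previous call.
        return client.GetDeploymentTargetsAsync(
            project,
            deploymentGroupId,
            tags: new[] { "web", "production" },
            agentStatus: TaskAgentStatusFilter.Online,
            top: 100);
    }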
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task ReplaceDeploymentTargetAsync( + Guid project, + int deploymentGroupId, + int targetId, + DeploymentMachine machine, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, targetId = targetId }; + HttpContent content = new ObjectContent(machine, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update a deployment target and its agent properties in a deployment group. Generally this is called by agent configuration tool. + /// + /// Project ID or project name + /// ID of the deployment group in which deployment target is updated. + /// ID of the deployment target to update. + /// Deployment target to update. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateDeploymentTargetAsync( + string project, + int deploymentGroupId, + int targetId, + DeploymentMachine machine, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, targetId = targetId }; + HttpContent content = new ObjectContent(machine, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update a deployment target and its agent properties in a deployment group. Generally this is called by agent configuration tool. + /// + /// Project ID + /// ID of the deployment group in which deployment target is updated. + /// ID of the deployment target to update. + /// Deployment target to update. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateDeploymentTargetAsync( + Guid project, + int deploymentGroupId, + int targetId, + DeploymentMachine machine, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId, targetId = targetId }; + HttpContent content = new ObjectContent(machine, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update tags of a list of deployment targets in a deployment group. + /// + /// Project ID or project name + /// ID of the deployment group in which deployment targets are updated. + /// Deployment targets with tags to udpdate. + /// + /// The cancellation token to cancel operation. 
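A read-modify-write sketch for the single-target calls above (client type, the Tags property on DeploymentMachine, and the tag values are assumptions):

    static async Task<DeploymentMachine> RetagTargetAsync(TaskAgentHttpClientBase client, Guid project, int deploymentGroupId, int targetId)
    {
        DeploymentMachine target = await client.GetDeploymentTargetAsync(project, deploymentGroupId, targetId);
        // Replace the tag set and PATCH the target back.
        target.Tags = new List<string> { "web", "canary" };
        return await client.UpdateDeploymentTargetAsync(project, deploymentGroupId, targetId, target);
    }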
+ public virtual Task> UpdateDeploymentTargetsAsync( + string project, + int deploymentGroupId, + IEnumerable machines, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + HttpContent content = new ObjectContent>(machines, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update tags of a list of deployment targets in a deployment group. + /// + /// Project ID + /// ID of the deployment group in which deployment targets are updated. + /// Deployment targets with tags to udpdate. + /// + /// The cancellation token to cancel operation. + public virtual Task> UpdateDeploymentTargetsAsync( + Guid project, + int deploymentGroupId, + IEnumerable machines, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + HttpContent content = new ObjectContent>(machines, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Create a task group. + /// + /// Project ID or project name + /// Task group object to create. + /// + /// The cancellation token to cancel operation. + public virtual Task AddTaskGroupAsync( + string project, + TaskGroupCreateParameter taskGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(taskGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Create a task group. + /// + /// Project ID + /// Task group object to create. + /// + /// The cancellation token to cancel operation. + public virtual Task AddTaskGroupAsync( + Guid project, + TaskGroupCreateParameter taskGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(taskGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Delete a task group. + /// + /// Project ID or project name + /// Id of the task group to be deleted. + /// Comments to delete. 
+ /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteTaskGroupAsync( + string project, + Guid taskGroupId, + string comment = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + List> queryParams = new List>(); + if (comment != null) + { + queryParams.Add("comment", comment); + } + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Delete a task group. + /// + /// Project ID + /// Id of the task group to be deleted. + /// Comments to delete. + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteTaskGroupAsync( + Guid project, + Guid taskGroupId, + string comment = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + List> queryParams = new List>(); + if (comment != null) + { + queryParams.Add("comment", comment); + } + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Get task group. + /// + /// Project ID or project name + /// Id of the task group. + /// version specification of the task group. examples: 1, 1.0. + /// The properties that should be expanded. example $expand=Tasks will expand nested task groups. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetTaskGroupAsync( + string project, + Guid taskGroupId, + string versionSpec, + TaskGroupExpands? expand = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + List> queryParams = new List>(); + queryParams.Add("versionSpec", versionSpec); + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get task group. + /// + /// Project ID + /// Id of the task group. + /// version specification of the task group. examples: 1, 1.0. + /// The properties that should be expanded. example $expand=Tasks will expand nested task groups. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetTaskGroupAsync( + Guid project, + Guid taskGroupId, + string versionSpec, + TaskGroupExpands? 
expand = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + List> queryParams = new List>(); + queryParams.Add("versionSpec", versionSpec); + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task GetTaskGroupRevisionAsync( + string project, + Guid taskGroupId, + int revision, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + List> queryParams = new List>(); + queryParams.Add("revision", revision.ToString(CultureInfo.InvariantCulture)); + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.1-preview.1"), + queryParameters: queryParams, + mediaType: "text/plain", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. 
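A sketch of fetching one task group with its nested steps expanded, following the $expand=Tasks example given in the remarks above (client type name assumed):

    static Task<TaskGroup> GetTaskGroupWithStepsAsync(TaskAgentHttpClientBase client, Guid project, Guid taskGroupId)
    {
        // versionSpec accepts "1" or "1.0" style values, as documented above.
        return client.GetTaskGroupAsync(project, taskGroupId, versionSpec: "1", expand: TaskGroupExpands.Tasks);
    }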
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task GetTaskGroupRevisionAsync( + Guid project, + Guid taskGroupId, + int revision, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + List> queryParams = new List>(); + queryParams.Add("revision", revision.ToString(CultureInfo.InvariantCulture)); + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.1-preview.1"), + queryParameters: queryParams, + mediaType: "text/plain", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] List task groups. + /// + /// Project ID or project name + /// Id of the task group. + /// 'true' to recursively expand task groups. Default is 'false'. + /// Guid of the taskId to filter. + /// 'true'to include deleted task groups. Default is 'false'. + /// Number of task groups to get. + /// Gets the task groups after the continuation token provided. + /// Gets the results in the defined order. Default is 'CreatedOnDescending'. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetTaskGroupsAsync( + string project, + Guid? taskGroupId = null, + bool? expanded = null, + Guid? taskIdFilter = null, + bool? deleted = null, + int? top = null, + DateTime? continuationToken = null, + TaskGroupQueryOrder? queryOrder = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + List> queryParams = new List>(); + if (expanded != null) + { + queryParams.Add("expanded", expanded.Value.ToString()); + } + if (taskIdFilter != null) + { + queryParams.Add("taskIdFilter", taskIdFilter.Value.ToString()); + } + if (deleted != null) + { + queryParams.Add("deleted", deleted.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (continuationToken != null) + { + AddDateTimeToQueryParams(queryParams, "continuationToken", continuationToken.Value); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] List task groups. + /// + /// Project ID + /// Id of the task group. + /// 'true' to recursively expand task groups. Default is 'false'. 
+ /// Guid of the taskId to filter. + /// 'true'to include deleted task groups. Default is 'false'. + /// Number of task groups to get. + /// Gets the task groups after the continuation token provided. + /// Gets the results in the defined order. Default is 'CreatedOnDescending'. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetTaskGroupsAsync( + Guid project, + Guid? taskGroupId = null, + bool? expanded = null, + Guid? taskIdFilter = null, + bool? deleted = null, + int? top = null, + DateTime? continuationToken = null, + TaskGroupQueryOrder? queryOrder = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + List> queryParams = new List>(); + if (expanded != null) + { + queryParams.Add("expanded", expanded.Value.ToString()); + } + if (taskIdFilter != null) + { + queryParams.Add("taskIdFilter", taskIdFilter.Value.ToString()); + } + if (deleted != null) + { + queryParams.Add("deleted", deleted.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (continuationToken != null) + { + AddDateTimeToQueryParams(queryParams, "continuationToken", continuationToken.Value); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> PublishPreviewTaskGroupAsync( + string project, + Guid taskGroupId, + TaskGroup taskGroup, + bool? disablePriorVersions = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + HttpContent content = new ObjectContent(taskGroup, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (disablePriorVersions != null) + { + queryParams.Add("disablePriorVersions", disablePriorVersions.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> PublishPreviewTaskGroupAsync( + Guid project, + Guid taskGroupId, + TaskGroup taskGroup, + bool? 
disablePriorVersions = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + HttpContent content = new ObjectContent(taskGroup, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + if (disablePriorVersions != null) + { + queryParams.Add("disablePriorVersions", disablePriorVersions.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> PublishTaskGroupAsync( + string project, + Guid parentTaskGroupId, + PublishTaskGroupMetadata taskGroupMetadata, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(taskGroupMetadata, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + queryParams.Add("parentTaskGroupId", parentTaskGroupId.ToString()); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> PublishTaskGroupAsync( + Guid project, + Guid parentTaskGroupId, + PublishTaskGroupMetadata taskGroupMetadata, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(taskGroupMetadata, new VssJsonMediaTypeFormatter(true)); + + List> queryParams = new List>(); + queryParams.Add("parentTaskGroupId", parentTaskGroupId.ToString()); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// The cancellation token to cancel operation. 
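For the task-group list call defined earlier in this hunk, a sketch that pages the newest groups first (client type name assumed; the TaskGroupQueryOrder member name follows the documented default):

    static Task<List<TaskGroup>> GetRecentTaskGroupsAsync(TaskAgentHttpClientBase client, Guid project)
    {
        // Deleted groups are excluded unless the deleted flag is set; top caps the page size.
        return client.GetTaskGroupsAsync(project, top: 25, queryOrder: TaskGroupQueryOrder.CreatedOnDescending);
    }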
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> UndeleteTaskGroupAsync( + string project, + TaskGroup taskGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(taskGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> UndeleteTaskGroupAsync( + Guid project, + TaskGroup taskGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(taskGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update a task group. + /// + /// Project ID or project name + /// Task group to update. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + [Obsolete("Use UpdateTaskGroup(Guid taskGroupId, [FromBody] TaskGroupUpdateParameter taskGroup) instead")] + public virtual Task UpdateTaskGroupAsync( + string project, + TaskGroupUpdateParameter taskGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(taskGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update a task group. + /// + /// Project ID + /// Task group to update. + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + [Obsolete("Use UpdateTaskGroup(Guid taskGroupId, [FromBody] TaskGroupUpdateParameter taskGroup) instead")] + public virtual Task UpdateTaskGroupAsync( + Guid project, + TaskGroupUpdateParameter taskGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(taskGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update a task group. + /// + /// Project ID or project name + /// Id of the task group to update. 
+ /// Task group to update. + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateTaskGroupAsync( + string project, + Guid taskGroupId, + TaskGroupUpdateParameter taskGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + HttpContent content = new ObjectContent(taskGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update a task group. + /// + /// Project ID + /// Id of the task group to update. + /// Task group to update. + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateTaskGroupAsync( + Guid project, + Guid taskGroupId, + TaskGroupUpdateParameter taskGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + HttpContent content = new ObjectContent(taskGroup, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteTaskDefinitionAsync( + Guid taskId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("60aac929-f0cd-4bc8-9ce4-6b30e8f1b1bd"); + object routeValues = new { taskId = taskId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task GetTaskContentZipAsync( + Guid taskId, + string versionString, + IEnumerable visibility = null, + bool? 
scopeLocal = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("60aac929-f0cd-4bc8-9ce4-6b30e8f1b1bd"); + object routeValues = new { taskId = taskId, versionString = versionString }; + + List> queryParams = new List>(); + if (visibility != null) + { + AddIEnumerableAsQueryParams(queryParams, "visibility", visibility); + } + if (scopeLocal != null) + { + queryParams.Add("scopeLocal", scopeLocal.Value.ToString()); + } + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.1-preview.1"), + queryParameters: queryParams, + mediaType: "application/zip", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetTaskDefinitionAsync( + Guid taskId, + string versionString, + IEnumerable visibility = null, + bool? scopeLocal = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("60aac929-f0cd-4bc8-9ce4-6b30e8f1b1bd"); + object routeValues = new { taskId = taskId, versionString = versionString }; + + List> queryParams = new List>(); + if (visibility != null) + { + AddIEnumerableAsQueryParams(queryParams, "visibility", visibility); + } + if (scopeLocal != null) + { + queryParams.Add("scopeLocal", scopeLocal.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetTaskDefinitionsAsync( + Guid? taskId = null, + IEnumerable visibility = null, + bool? 
scopeLocal = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("60aac929-f0cd-4bc8-9ce4-6b30e8f1b1bd"); + object routeValues = new { taskId = taskId }; + + List> queryParams = new List>(); + if (visibility != null) + { + AddIEnumerableAsQueryParams(queryParams, "visibility", visibility); + } + if (scopeLocal != null) + { + queryParams.Add("scopeLocal", scopeLocal.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateAgentUpdateStateAsync( + int poolId, + int agentId, + string currentState, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("8cc1b02b-ae49-4516-b5ad-4f9b29967c30"); + object routeValues = new { poolId = poolId, agentId = agentId }; + + List> queryParams = new List>(); + queryParams.Add("currentState", currentState); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateAgentUserCapabilitiesAsync( + int poolId, + int agentId, + IDictionary userCapabilities, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("30ba3ada-fedf-4da8-bbb5-dacf2f82e176"); + object routeValues = new { poolId = poolId, agentId = agentId }; + HttpContent content = new ObjectContent>(userCapabilities, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Add a variable group. + /// + /// Project ID or project name + /// Variable group to add. + /// + /// The cancellation token to cancel operation. + public virtual Task AddVariableGroupAsync( + string project, + VariableGroupParameters group, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("f5b09dd5-9d54-45a1-8b5a-1c8287d634cc"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(group, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Add a variable group. + /// + /// Project ID + /// Variable group to add. + /// + /// The cancellation token to cancel operation. 
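The generated read methods above all follow the same convention: every optional argument is folded into a list of key/value pairs, and only non-null values ever become query parameters (numeric values are formatted with CultureInfo.InvariantCulture). A minimal stand-alone sketch of that pattern follows; the class and method names are illustrative and not part of the SDK, which uses its own helper extensions for adding pairs.

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;

static class QueryStringSketch
{
    // Mirrors the generated pattern: only non-null optionals are appended as query parameters.
    public static string Build(string groupName = null, int? top = null, int? continuationToken = null)
    {
        var queryParams = new List<KeyValuePair<string, string>>();
        if (groupName != null)
        {
            queryParams.Add(new KeyValuePair<string, string>("groupName", groupName));
        }
        if (top != null)
        {
            queryParams.Add(new KeyValuePair<string, string>("$top", top.Value.ToString(CultureInfo.InvariantCulture)));
        }
        if (continuationToken != null)
        {
            queryParams.Add(new KeyValuePair<string, string>("continuationToken", continuationToken.Value.ToString(CultureInfo.InvariantCulture)));
        }

        // Keys are emitted as-is; only values are escaped, so "$top" stays literal.
        return string.Join("&", queryParams.Select(p => $"{p.Key}={Uri.EscapeDataString(p.Value)}"));
    }
}

For example, Build(groupName: "my-vars", top: 10) yields "groupName=my-vars&$top=10", while omitted arguments never appear in the query string.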
+ public virtual Task AddVariableGroupAsync( + Guid project, + VariableGroupParameters group, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("f5b09dd5-9d54-45a1-8b5a-1c8287d634cc"); + object routeValues = new { project = project }; + HttpContent content = new ObjectContent(group, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Delete a variable group + /// + /// Project ID or project name + /// Id of the variable group. + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteVariableGroupAsync( + string project, + int groupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("f5b09dd5-9d54-45a1-8b5a-1c8287d634cc"); + object routeValues = new { project = project, groupId = groupId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Delete a variable group + /// + /// Project ID + /// Id of the variable group. + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteVariableGroupAsync( + Guid project, + int groupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("f5b09dd5-9d54-45a1-8b5a-1c8287d634cc"); + object routeValues = new { project = project, groupId = groupId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] Get a variable group. + /// + /// Project ID or project name + /// Id of the variable group. + /// + /// The cancellation token to cancel operation. + public virtual Task GetVariableGroupAsync( + string project, + int groupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("f5b09dd5-9d54-45a1-8b5a-1c8287d634cc"); + object routeValues = new { project = project, groupId = groupId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a variable group. + /// + /// Project ID + /// Id of the variable group. + /// + /// The cancellation token to cancel operation. 
+ public virtual Task GetVariableGroupAsync( + Guid project, + int groupId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("f5b09dd5-9d54-45a1-8b5a-1c8287d634cc"); + object routeValues = new { project = project, groupId = groupId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get variable groups. + /// + /// Project ID or project name + /// Name of variable group. + /// Action filter for the variable group. It specifies the action which can be performed on the variable groups. + /// Number of variable groups to get. + /// Gets the variable groups after the continuation token provided. + /// Gets the results in the defined order. Default is 'IdDescending'. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetVariableGroupsAsync( + string project, + string groupName = null, + VariableGroupActionFilter? actionFilter = null, + int? top = null, + int? continuationToken = null, + VariableGroupQueryOrder? queryOrder = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("f5b09dd5-9d54-45a1-8b5a-1c8287d634cc"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (groupName != null) + { + queryParams.Add("groupName", groupName); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken.Value.ToString(CultureInfo.InvariantCulture)); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get variable groups. + /// + /// Project ID + /// Name of variable group. + /// Action filter for the variable group. It specifies the action which can be performed on the variable groups. + /// Number of variable groups to get. + /// Gets the variable groups after the continuation token provided. + /// Gets the results in the defined order. Default is 'IdDescending'. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetVariableGroupsAsync( + Guid project, + string groupName = null, + VariableGroupActionFilter? actionFilter = null, + int? top = null, + int? continuationToken = null, + VariableGroupQueryOrder? 
queryOrder = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("f5b09dd5-9d54-45a1-8b5a-1c8287d634cc"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (groupName != null) + { + queryParams.Add("groupName", groupName); + } + if (actionFilter != null) + { + queryParams.Add("actionFilter", actionFilter.Value.ToString()); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken.Value.ToString(CultureInfo.InvariantCulture)); + } + if (queryOrder != null) + { + queryParams.Add("queryOrder", queryOrder.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get variable groups by ids. + /// + /// Project ID or project name + /// Comma separated list of Ids of variable groups. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetVariableGroupsByIdAsync( + string project, + IEnumerable groupIds, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("f5b09dd5-9d54-45a1-8b5a-1c8287d634cc"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + string groupIdsAsString = null; + if (groupIds != null) + { + groupIdsAsString = string.Join(",", groupIds); + } + queryParams.Add("groupIds", groupIdsAsString); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get variable groups by ids. + /// + /// Project ID + /// Comma separated list of Ids of variable groups. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetVariableGroupsByIdAsync( + Guid project, + IEnumerable groupIds, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("f5b09dd5-9d54-45a1-8b5a-1c8287d634cc"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + string groupIdsAsString = null; + if (groupIds != null) + { + groupIdsAsString = string.Join(",", groupIds); + } + queryParams.Add("groupIds", groupIdsAsString); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Update a variable group. + /// + /// Project ID or project name + /// Id of the variable group to update. + /// Variable group to update. + /// + /// The cancellation token to cancel operation. 
+ public virtual Task UpdateVariableGroupAsync( + string project, + int groupId, + VariableGroupParameters group, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("f5b09dd5-9d54-45a1-8b5a-1c8287d634cc"); + object routeValues = new { project = project, groupId = groupId }; + HttpContent content = new ObjectContent(group, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] Update a variable group. + /// + /// Project ID + /// Id of the variable group to update. + /// Variable group to update. + /// + /// The cancellation token to cancel operation. + public virtual Task UpdateVariableGroupAsync( + Guid project, + int groupId, + VariableGroupParameters group, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("f5b09dd5-9d54-45a1-8b5a-1c8287d634cc"); + object routeValues = new { project = project, groupId = groupId }; + HttpContent content = new ObjectContent(group, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> QuerySharedProjectsForVariableGroupAsync( + int groupId, + string project, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("74455598-def7-499a-b7a3-a41d1c8225f8"); + object routeValues = new { groupId = groupId }; + + List> queryParams = new List>(); + queryParams.Add("project", project); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task ShareVariableGroupWithProjectAsync( + int groupId, + string fromProject, + string withProject, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("74455598-def7-499a-b7a3-a41d1c8225f8"); + object routeValues = new { groupId = groupId }; + + List> queryParams = new List>(); + queryParams.Add("fromProject", fromProject); + queryParams.Add("withProject", withProject); + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task AddVirtualMachineGroupAsync( + string project, + int environmentId, + VirtualMachineGroupCreateParameters createParameters, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("9e597901-4af7-4cc3-8d92-47d54db8ebfb"); + object routeValues = new { project = project, environmentId = environmentId }; + HttpContent content = new ObjectContent(createParameters, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task AddVirtualMachineGroupAsync( + Guid project, + int environmentId, + VirtualMachineGroupCreateParameters createParameters, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("9e597901-4af7-4cc3-8d92-47d54db8ebfb"); + object routeValues = new { project = project, environmentId = environmentId }; + HttpContent content = new ObjectContent(createParameters, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteVirtualMachineGroupAsync( + string project, + int environmentId, + int resourceId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("9e597901-4af7-4cc3-8d92-47d54db8ebfb"); + object routeValues = new { project = project, environmentId = environmentId, resourceId = resourceId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task DeleteVirtualMachineGroupAsync( + Guid project, + int environmentId, + int resourceId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("9e597901-4af7-4cc3-8d92-47d54db8ebfb"); + object routeValues = new { project = project, environmentId = environmentId, resourceId = resourceId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetVirtualMachineGroupAsync( + string project, + int environmentId, + int resourceId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("9e597901-4af7-4cc3-8d92-47d54db8ebfb"); + object routeValues = new { project = project, environmentId = environmentId, resourceId = resourceId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetVirtualMachineGroupAsync( + Guid project, + int environmentId, + int resourceId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("9e597901-4af7-4cc3-8d92-47d54db8ebfb"); + object routeValues = new { project = project, environmentId = environmentId, resourceId = resourceId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateVirtualMachineGroupAsync( + string project, + int environmentId, + VirtualMachineGroup resource, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("9e597901-4af7-4cc3-8d92-47d54db8ebfb"); + object routeValues = new { project = project, environmentId = environmentId }; + HttpContent content = new ObjectContent(resource, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task UpdateVirtualMachineGroupAsync( + Guid project, + int environmentId, + VirtualMachineGroup resource, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("9e597901-4af7-4cc3-8d92-47d54db8ebfb"); + object routeValues = new { project = project, environmentId = environmentId }; + HttpContent content = new ObjectContent(resource, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
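The update methods above construct new HttpMethod("PATCH") by hand, presumably because a static HttpMethod.Patch member is not available on all target frameworks, and attach the payload as JSON content. A hedged, BCL-only sketch of the same request shape (the endpoint, payload type, and helper name are illustrative, and System.Text.Json stands in for the SDK's VssJsonMediaTypeFormatter):

using System;
using System.Net.Http;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;

static class PatchSketch
{
    // Sends a PATCH request with a JSON body and returns the response payload as a string.
    public static async Task<string> PatchJsonAsync<T>(HttpClient client, Uri requestUri, T payload)
    {
        string json = JsonSerializer.Serialize(payload);
        using var request = new HttpRequestMessage(new HttpMethod("PATCH"), requestUri)
        {
            Content = new StringContent(json, Encoding.UTF8, "application/json")
        };

        using HttpResponseMessage response = await client.SendAsync(request).ConfigureAwait(false);
        response.EnsureSuccessStatusCode();
        return await response.Content.ReadAsStringAsync().ConfigureAwait(false);
    }
}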
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetVirtualMachinesAsync( + string project, + int environmentId, + int resourceId, + string continuationToken = null, + string name = null, + bool? partialNameMatch = null, + IEnumerable tags = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("48700676-2ba5-4282-8ec8-083280d169c7"); + object routeValues = new { project = project, environmentId = environmentId, resourceId = resourceId }; + + List> queryParams = new List>(); + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (name != null) + { + queryParams.Add("name", name); + } + if (partialNameMatch != null) + { + queryParams.Add("partialNameMatch", partialNameMatch.Value.ToString()); + } + if (tags != null && tags.Any()) + { + queryParams.Add("tags", string.Join(",", tags)); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetVirtualMachinesAsync( + Guid project, + int environmentId, + int resourceId, + string continuationToken = null, + string name = null, + bool? partialNameMatch = null, + IEnumerable tags = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("48700676-2ba5-4282-8ec8-083280d169c7"); + object routeValues = new { project = project, environmentId = environmentId, resourceId = resourceId }; + + List> queryParams = new List>(); + if (continuationToken != null) + { + queryParams.Add("continuationToken", continuationToken); + } + if (name != null) + { + queryParams.Add("name", name); + } + if (partialNameMatch != null) + { + queryParams.Add("partialNameMatch", partialNameMatch.Value.ToString()); + } + if (tags != null && tags.Any()) + { + queryParams.Add("tags", string.Join(",", tags)); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> UpdateVirtualMachinesAsync( + string project, + int environmentId, + int resourceId, + IEnumerable machines, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("48700676-2ba5-4282-8ec8-083280d169c7"); + object routeValues = new { project = project, environmentId = environmentId, resourceId = resourceId }; + HttpContent content = new ObjectContent>(machines, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> UpdateVirtualMachinesAsync( + Guid project, + int environmentId, + int resourceId, + IEnumerable machines, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("48700676-2ba5-4282-8ec8-083280d169c7"); + object routeValues = new { project = project, environmentId = environmentId, resourceId = resourceId }; + HttpContent content = new ObjectContent>(machines, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task AcquireAccessTokenAsync( + AadOauthTokenRequest authenticationRequest, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("9c63205e-3a0f-42a0-ad88-095200f13607"); + HttpContent content = new ObjectContent(authenticationRequest, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + [Obsolete("ServiceEndpoint APIs under distributedtask area is deprecated. Use the APIs under serviceendpoint area instead.")] + public virtual Task CreateAadOAuthRequestAsync( + string tenantId, + string redirectUri, + AadLoginPromptOption? promptOption = null, + string completeCallbackPayload = null, + bool? 
completeCallbackByAuthCode = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("9c63205e-3a0f-42a0-ad88-095200f13607"); + + List> queryParams = new List>(); + queryParams.Add("tenantId", tenantId); + queryParams.Add("redirectUri", redirectUri); + if (promptOption != null) + { + queryParams.Add("promptOption", promptOption.Value.ToString()); + } + if (completeCallbackPayload != null) + { + queryParams.Add("completeCallbackPayload", completeCallbackPayload); + } + if (completeCallbackByAuthCode != null) + { + queryParams.Add("completeCallbackByAuthCode", completeCallbackByAuthCode.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetVstsAadTenantIdAsync( + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("9c63205e-3a0f-42a0-ad88-095200f13607"); + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetYamlSchemaAsync( + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("1f9990b9-1dba-441f-9c2e-6485888c42b6"); + + return SendAsync( + httpMethod, + locationId, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + } +} diff --git a/src/Sdk/DTGenerated/Generated/TaskHttpClientBase.cs b/src/Sdk/DTGenerated/Generated/TaskHttpClientBase.cs new file mode 100644 index 00000000000..799566eab4b --- /dev/null +++ b/src/Sdk/DTGenerated/Generated/TaskHttpClientBase.cs @@ -0,0 +1,800 @@ +/* + * --------------------------------------------------------- + * Copyright(C) Microsoft Corporation. All rights reserved. 
+ * --------------------------------------------------------- + * + * --------------------------------------------------------- + * Generated file, DO NOT EDIT + * --------------------------------------------------------- + * + * See following wiki page for instructions on how to regenerate: + * https://aka.ms/azure-devops-client-generation + * + * Configuration file: + * distributedtask\client\webapi\clientgeneratorconfigs\genclient.json + */ + +using System; +using System.Collections.Generic; +using System.Collections.Specialized; +using System.ComponentModel; +using System.Globalization; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Net.Http.Formatting; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + public abstract class TaskHttpClientBase : VssHttpClientBase + { + public TaskHttpClientBase(Uri baseUrl, VssCredentials credentials) + : base(baseUrl, credentials) + { + } + + public TaskHttpClientBase(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings) + : base(baseUrl, credentials, settings) + { + } + + public TaskHttpClientBase(Uri baseUrl, VssCredentials credentials, params DelegatingHandler[] handlers) + : base(baseUrl, credentials, handlers) + { + } + + public TaskHttpClientBase(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings, params DelegatingHandler[] handlers) + : base(baseUrl, credentials, settings, handlers) + { + } + + public TaskHttpClientBase(Uri baseUrl, HttpMessageHandler pipeline, bool disposeHandler) + : base(baseUrl, pipeline, disposeHandler) + { + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetPlanAttachmentsAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + string type, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("eb55e5d6-2f30-4295-b5ed-38da50b1fc52"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId, type = type }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// + /// + /// Stream to upload + /// + /// The cancellation token to cancel operation. 
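Several of the constructors above accept params DelegatingHandler[] handlers, so cross-cutting concerns such as tracing or retries can be layered into the HTTP pipeline without touching the generated code. A minimal sketch of such a handler, using only BCL types; the class name and logging destination are illustrative only:

using System.Diagnostics;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

// Illustrative only: an instance of a handler like this could be passed to the
// client constructors that take "params DelegatingHandler[] handlers".
public sealed class RequestTraceHandler : DelegatingHandler
{
    protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        var stopwatch = Stopwatch.StartNew();
        HttpResponseMessage response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);
        Trace.WriteLine($"{request.Method} {request.RequestUri} -> {(int)response.StatusCode} in {stopwatch.ElapsedMilliseconds} ms");
        return response;
    }
}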
+ public virtual Task CreateAttachmentAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + Guid timelineId, + Guid recordId, + string type, + string name, + Stream uploadStream, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PUT"); + Guid locationId = new Guid("7898f959-9cdf-4096-b29e-7f293031629e"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId, timelineId = timelineId, recordId = recordId, type = type, name = name }; + HttpContent content = new StreamContent(uploadStream); + content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetAttachmentAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + Guid timelineId, + Guid recordId, + string type, + string name, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("7898f959-9cdf-4096-b29e-7f293031629e"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId, timelineId = timelineId, recordId = recordId, type = type, name = name }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
+ public virtual async Task GetAttachmentContentAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + Guid timelineId, + Guid recordId, + string type, + string name, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("7898f959-9cdf-4096-b29e-7f293031629e"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId, timelineId = timelineId, recordId = recordId, type = type, name = name }; + HttpResponseMessage response; + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("5.1-preview.1"), + mediaType: "application/octet-stream", + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + response.EnsureSuccessStatusCode(); + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + return new GZipStream(responseStream, CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetAttachmentsAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + Guid timelineId, + Guid recordId, + string type, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("7898f959-9cdf-4096-b29e-7f293031629e"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId, timelineId = timelineId, recordId = recordId, type = type }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. 
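GetAttachmentContentAsync above (like GetTaskContentZipAsync earlier) streams the body with HttpCompletionOption.ResponseHeadersRead and wraps it in a GZipStream only when the server reports Content-Encoding: gzip. The same technique in a stand-alone form, with an ordinary HttpClient and an illustrative method name; as in the generated code, the caller owns the returned stream:

using System;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;

static class DownloadSketch
{
    // Returns a readable stream, transparently decompressing gzip-encoded responses.
    public static async Task<Stream> GetContentStreamAsync(HttpClient client, Uri requestUri)
    {
        HttpResponseMessage response = await client.GetAsync(requestUri, HttpCompletionOption.ResponseHeadersRead).ConfigureAwait(false);
        response.EnsureSuccessStatusCode();

        Stream responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false);
        if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase))
        {
            return new GZipStream(responseStream, CompressionMode.Decompress);
        }

        return responseStream;
    }
}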
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual async Task AppendTimelineRecordFeedAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + Guid timelineId, + Guid recordId, + TimelineRecordFeedLinesWrapper lines, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("858983e4-19bd-4c5e-864c-507b59b58b12"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId, timelineId = timelineId, recordId = recordId }; + HttpContent content = new ObjectContent(lines, new VssJsonMediaTypeFormatter(true)); + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetJobInstanceAsync( + Guid scopeIdentifier, + string hubName, + string orchestrationId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("0a1efd25-abda-43bd-9629-6c7bdd2e0d60"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, orchestrationId = orchestrationId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// Stream to upload + /// + /// The cancellation token to cancel operation. + public virtual Task AppendLogContentAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + int logId, + Stream uploadStream, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("46f5667d-263a-4684-91b1-dff7fdcf64e2"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId, logId = logId }; + HttpContent content = new StreamContent(uploadStream); + content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// The cancellation token to cancel operation. 
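AppendLogContentAsync above (and CreateAttachmentAsync earlier) uploads raw bytes by wrapping the caller's stream in a StreamContent and stamping it as application/octet-stream. A small sketch of that upload shape with plain HttpClient; the target URI and helper name are illustrative:

using System;
using System.IO;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;

static class UploadSketch
{
    // Posts the stream's bytes as application/octet-stream and throws on a non-success status.
    public static async Task UploadAsync(HttpClient client, Uri requestUri, Stream uploadStream)
    {
        var content = new StreamContent(uploadStream);
        content.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");

        using HttpResponseMessage response = await client.PostAsync(requestUri, content).ConfigureAwait(false);
        response.EnsureSuccessStatusCode();
    }
}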
+ public virtual Task CreateLogAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + TaskLog log, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("46f5667d-263a-4684-91b1-dff7fdcf64e2"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId }; + HttpContent content = new ObjectContent(log, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetLogAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + int logId, + long? startLine = null, + long? endLine = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("46f5667d-263a-4684-91b1-dff7fdcf64e2"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId, logId = logId }; + + List> queryParams = new List>(); + if (startLine != null) + { + queryParams.Add("startLine", startLine.Value.ToString(CultureInfo.InvariantCulture)); + } + if (endLine != null) + { + queryParams.Add("endLine", endLine.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetLogsAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("46f5667d-263a-4684-91b1-dff7fdcf64e2"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetPlanGroupsQueueMetricsAsync( + Guid scopeIdentifier, + string hubName, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("038fd4d5-cda7-44ca-92c0-935843fee1a7"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task> GetQueuedPlanGroupsAsync( + Guid scopeIdentifier, + string hubName, + PlanGroupStatus? statusFilter = null, + int? count = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("0dd73091-3e36-4f43-b443-1b76dd426d84"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName }; + + List> queryParams = new List>(); + if (statusFilter != null) + { + queryParams.Add("statusFilter", statusFilter.Value.ToString()); + } + if (count != null) + { + queryParams.Add("count", count.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetQueuedPlanGroupAsync( + Guid scopeIdentifier, + string hubName, + string planGroup, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("65fd0708-bc1e-447b-a731-0587c5464e5b"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planGroup = planGroup }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never)] + public virtual Task GetPlanAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("5cecd946-d704-471e-a45f-3b4064fcfaba"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId }; + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetRecordsAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + Guid timelineId, + int? changeId = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("8893bc5b-35b2-4be7-83cb-99e683551db4"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId, timelineId = timelineId }; + + List> queryParams = new List>(); + if (changeId != null) + { + queryParams.Add("changeId", changeId.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> UpdateRecordsAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + Guid timelineId, + VssJsonCollectionWrapper> records, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("PATCH"); + Guid locationId = new Guid("8893bc5b-35b2-4be7-83cb-99e683551db4"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId, timelineId = timelineId }; + HttpContent content = new ObjectContent>>(records, new VssJsonMediaTypeFormatter(true)); + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// The cancellation token to cancel operation. 
+ public virtual Task CreateTimelineAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + Timeline timeline, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("POST"); + Guid locationId = new Guid("83597576-cc2c-453c-bea6-2882ae6a1653"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId }; + HttpContent content = new ObjectContent(timeline, new VssJsonMediaTypeFormatter(true)); + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken, + content: content); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteTimelineAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + Guid timelineId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("83597576-cc2c-453c-bea6-2882ae6a1653"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId, timelineId = timelineId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task GetTimelineAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + Guid timelineId, + int? changeId = null, + bool? includeRecords = null, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("83597576-cc2c-453c-bea6-2882ae6a1653"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId, timelineId = timelineId }; + + List> queryParams = new List>(); + if (changeId != null) + { + queryParams.Add("changeId", changeId.Value.ToString(CultureInfo.InvariantCulture)); + } + if (includeRecords != null) + { + queryParams.Add("includeRecords", includeRecords.Value.ToString()); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// The project GUID to scope the request + /// The name of the server hub: "build" for the Build server or "rm" for the Release Management server + /// + /// + /// The cancellation token to cancel operation. 
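One way a caller might consume the timeline APIs in this section is to poll GetRecordsAsync incrementally, passing the highest change id seen so far. This is a hypothetical sketch, not part of the generated client: it assumes GetRecordsAsync returns Task<List<TimelineRecord>> once its stripped generic arguments are restored, that TimelineRecord exposes an Int32 ChangeId, and that the changeId parameter filters to records changed after the supplied value.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;

static class TimelinePollingSketch
{
    // Hypothetical polling loop over timeline records, under the assumptions stated above.
    public static async Task PollRecordsAsync(
        TaskHttpClientBase client,
        Guid scopeIdentifier,
        string hubName,
        Guid planId,
        Guid timelineId,
        CancellationToken cancellationToken)
    {
        int? lastChangeId = null;
        while (!cancellationToken.IsCancellationRequested)
        {
            List<TimelineRecord> records = await client.GetRecordsAsync(
                scopeIdentifier,
                hubName,
                planId,
                timelineId,
                changeId: lastChangeId,
                cancellationToken: cancellationToken).ConfigureAwait(false);

            if (records.Count > 0)
            {
                lastChangeId = records.Max(r => r.ChangeId);
            }

            await Task.Delay(TimeSpan.FromSeconds(5), cancellationToken).ConfigureAwait(false);
        }
    }
}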
+ public virtual Task> GetTimelinesAsync( + Guid scopeIdentifier, + string hubName, + Guid planId, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("83597576-cc2c-453c-bea6-2882ae6a1653"); + object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId }; + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + userState: userState, + cancellationToken: cancellationToken); + } + } +} diff --git a/src/Sdk/DTLogging/Logging/ISecret.cs b/src/Sdk/DTLogging/Logging/ISecret.cs new file mode 100644 index 00000000000..9d549717294 --- /dev/null +++ b/src/Sdk/DTLogging/Logging/ISecret.cs @@ -0,0 +1,13 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.Logging +{ + internal interface ISecret + { + /// + /// Returns one item (start, length) for each match found in the input string. + /// + IEnumerable GetPositions(String input); + } +} diff --git a/src/Sdk/DTLogging/Logging/ISecretMasker.cs b/src/Sdk/DTLogging/Logging/ISecretMasker.cs new file mode 100644 index 00000000000..eec40bef039 --- /dev/null +++ b/src/Sdk/DTLogging/Logging/ISecretMasker.cs @@ -0,0 +1,15 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Logging +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface ISecretMasker + { + void AddRegex(String pattern); + void AddValue(String value); + void AddValueEncoder(ValueEncoder encoder); + ISecretMasker Clone(); + String MaskSecrets(String input); + } +} diff --git a/src/Sdk/DTLogging/Logging/RegexSecret.cs b/src/Sdk/DTLogging/Logging/RegexSecret.cs new file mode 100644 index 00000000000..5d1846c6b61 --- /dev/null +++ b/src/Sdk/DTLogging/Logging/RegexSecret.cs @@ -0,0 +1,50 @@ +using GitHub.Services.Common; +using System; +using System.Collections.Generic; +using System.Text.RegularExpressions; + +namespace GitHub.DistributedTask.Logging +{ + internal sealed class RegexSecret : ISecret + { + public RegexSecret(String pattern) + { + ArgumentUtility.CheckStringForNullOrEmpty(pattern, nameof(pattern)); + m_pattern = pattern; + m_regex = new Regex(pattern); + } + + public override Boolean Equals(Object obj) + { + var item = obj as RegexSecret; + if (item == null) + { + return false; + } + return String.Equals(m_pattern, item.m_pattern, StringComparison.Ordinal); + } + + public override int GetHashCode() => m_pattern.GetHashCode(); + + public IEnumerable GetPositions(String input) + { + Int32 startIndex = 0; + while (startIndex < input.Length) + { + var match = m_regex.Match(input, startIndex); + if (match.Success) + { + startIndex = match.Index + 1; + yield return new ReplacementPosition(match.Index, match.Length); + } + else + { + yield break; + } + } + } + + private readonly String m_pattern; + private readonly Regex m_regex; + } +} diff --git a/src/Sdk/DTLogging/Logging/ReplacementPosition.cs b/src/Sdk/DTLogging/Logging/ReplacementPosition.cs new file mode 100644 index 00000000000..acfcd9a20e6 --- /dev/null +++ b/src/Sdk/DTLogging/Logging/ReplacementPosition.cs @@ -0,0 +1,29 @@ +using System; + +namespace GitHub.DistributedTask.Logging +{ + internal sealed class ReplacementPosition + { + public ReplacementPosition(Int32 start, Int32 length) + { + Start = start; + Length = length; + } + + public ReplacementPosition(ReplacementPosition copy) + { + Start = copy.Start; + Length = copy.Length; + } + + 
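A standalone sketch of the scanning loop used by RegexSecret.GetPositions above. After each hit the search resumes at match.Index + 1 rather than at the end of the match, so overlapping occurrences are all reported as (start, length) pairs; overlapping ReplacementPosition ranges are then merged into a single replacement in SecretMasker.MaskSecrets.

using System;
using System.Text.RegularExpressions;

internal static class RegexScanSketch
{
    private static void Main()
    {
        var regex = new Regex("aba");
        var input = "ababa";              // two overlapping matches

        var startIndex = 0;
        while (startIndex < input.Length)
        {
            Match match = regex.Match(input, startIndex);
            if (!match.Success)
            {
                break;
            }

            Console.WriteLine($"start={match.Index} length={match.Length}");
            startIndex = match.Index + 1; // step past the start only, so overlaps are not skipped
        }

        // Prints: start=0 length=3, then start=2 length=3.
        // Merged downstream, that becomes one replacement covering (0, 5).
    }
}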
public Int32 Start { get; set; } + public Int32 Length { get; set; } + public Int32 End + { + get + { + return Start + Length; + } + } + } +} diff --git a/src/Sdk/DTLogging/Logging/SecretMasker.cs b/src/Sdk/DTLogging/Logging/SecretMasker.cs new file mode 100644 index 00000000000..430b977f521 --- /dev/null +++ b/src/Sdk/DTLogging/Logging/SecretMasker.cs @@ -0,0 +1,298 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Text; +using System.Threading; + +namespace GitHub.DistributedTask.Logging +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class SecretMasker : ISecretMasker, IDisposable + { + public SecretMasker() + { + m_originalValueSecrets = new HashSet(); + m_regexSecrets = new HashSet(); + m_valueEncoders = new HashSet(); + m_valueSecrets = new HashSet(); + } + + private SecretMasker(SecretMasker copy) + { + // Read section. + try + { + copy.m_lock.EnterReadLock(); + + // Copy the hash sets. + m_originalValueSecrets = new HashSet(copy.m_originalValueSecrets); + m_regexSecrets = new HashSet(copy.m_regexSecrets); + m_valueEncoders = new HashSet(copy.m_valueEncoders); + m_valueSecrets = new HashSet(copy.m_valueSecrets); + } + finally + { + if (copy.m_lock.IsReadLockHeld) + { + copy.m_lock.ExitReadLock(); + } + } + } + + /// + /// This implementation assumes no more than one thread is adding regexes, values, or encoders at any given time. + /// + public void AddRegex(String pattern) + { + // Test for empty. + if (String.IsNullOrEmpty(pattern)) + { + return; + } + + // Write section. + try + { + m_lock.EnterWriteLock(); + + // Add the value. + m_regexSecrets.Add(new RegexSecret(pattern)); + } + finally + { + if (m_lock.IsWriteLockHeld) + { + m_lock.ExitWriteLock(); + } + } + } + + /// + /// This implementation assumes no more than one thread is adding regexes, values, or encoders at any given time. + /// + public void AddValue(String value) + { + // Test for empty. + if (String.IsNullOrEmpty(value)) + { + return; + } + + var valueSecrets = new List(new[] { new ValueSecret(value) }); + + // Read section. + ValueEncoder[] valueEncoders; + try + { + m_lock.EnterReadLock(); + + // Test whether already added. + if (m_originalValueSecrets.Contains(valueSecrets[0])) + { + return; + } + + // Read the value encoders. + valueEncoders = m_valueEncoders.ToArray(); + } + finally + { + if (m_lock.IsReadLockHeld) + { + m_lock.ExitReadLock(); + } + } + + // Compute the encoded values. + foreach (ValueEncoder valueEncoder in valueEncoders) + { + String encodedValue = valueEncoder(value); + if (!String.IsNullOrEmpty(encodedValue)) + { + valueSecrets.Add(new ValueSecret(encodedValue)); + } + } + + // Write section. + try + { + m_lock.EnterWriteLock(); + + // Add the values. + m_originalValueSecrets.Add(valueSecrets[0]); + foreach (ValueSecret valueSecret in valueSecrets) + { + m_valueSecrets.Add(valueSecret); + } + } + finally + { + if (m_lock.IsWriteLockHeld) + { + m_lock.ExitWriteLock(); + } + } + } + + /// + /// This implementation assumes no more than one thread is adding regexes, values, or encoders at any given time. + /// + public void AddValueEncoder(ValueEncoder encoder) + { + ValueSecret[] originalSecrets; + + // Read section. + try + { + m_lock.EnterReadLock(); + + // Test whether already added. + if (m_valueEncoders.Contains(encoder)) + { + return; + } + + // Read the original value secrets. 
+ originalSecrets = m_originalValueSecrets.ToArray(); + } + finally + { + if (m_lock.IsReadLockHeld) + { + m_lock.ExitReadLock(); + } + } + + // Compute the encoded values. + var encodedSecrets = new List(); + foreach (ValueSecret originalSecret in originalSecrets) + { + String encodedValue = encoder(originalSecret.m_value); + if (!String.IsNullOrEmpty(encodedValue)) + { + encodedSecrets.Add(new ValueSecret(encodedValue)); + } + } + + // Write section. + try + { + m_lock.EnterWriteLock(); + + // Add the encoder. + m_valueEncoders.Add(encoder); + + // Add the values. + foreach (ValueSecret encodedSecret in encodedSecrets) + { + m_valueSecrets.Add(encodedSecret); + } + } + finally + { + if (m_lock.IsWriteLockHeld) + { + m_lock.ExitWriteLock(); + } + } + } + + public ISecretMasker Clone() => new SecretMasker(this); + + public void Dispose() + { + m_lock?.Dispose(); + m_lock = null; + } + + public String MaskSecrets(String input) + { + if (String.IsNullOrEmpty(input)) + { + return String.Empty; + } + + var secretPositions = new List(); + + // Read section. + try + { + m_lock.EnterReadLock(); + + // Get indexes and lengths of all substrings that will be replaced. + foreach (RegexSecret regexSecret in m_regexSecrets) + { + secretPositions.AddRange(regexSecret.GetPositions(input)); + } + + foreach (ValueSecret valueSecret in m_valueSecrets) + { + secretPositions.AddRange(valueSecret.GetPositions(input)); + } + } + finally + { + if (m_lock.IsReadLockHeld) + { + m_lock.ExitReadLock(); + } + } + + // Short-circuit if nothing to replace. + if (secretPositions.Count == 0) + { + return input; + } + + // Merge positions into ranges of characters to replace. + List replacementPositions = new List(); + ReplacementPosition currentReplacement = null; + foreach (ReplacementPosition secretPosition in secretPositions.OrderBy(x => x.Start)) + { + if (currentReplacement == null) + { + currentReplacement = new ReplacementPosition(copy: secretPosition); + replacementPositions.Add(currentReplacement); + } + else + { + if (secretPosition.Start <= currentReplacement.End) + { + // Overlap + currentReplacement.Length = Math.Max(currentReplacement.End, secretPosition.End) - currentReplacement.Start; + } + else + { + // No overlap + currentReplacement = new ReplacementPosition(copy: secretPosition); + replacementPositions.Add(currentReplacement); + } + } + } + + // Replace + var stringBuilder = new StringBuilder(); + Int32 startIndex = 0; + foreach (var replacement in replacementPositions) + { + stringBuilder.Append(input.Substring(startIndex, replacement.Start - startIndex)); + stringBuilder.Append("***"); + startIndex = replacement.Start + replacement.Length; + } + + if (startIndex < input.Length) + { + stringBuilder.Append(input.Substring(startIndex)); + } + + return stringBuilder.ToString(); + } + + private readonly HashSet m_originalValueSecrets; + private readonly HashSet m_regexSecrets; + private readonly HashSet m_valueEncoders; + private readonly HashSet m_valueSecrets; + private ReaderWriterLockSlim m_lock = new ReaderWriterLockSlim(LockRecursionPolicy.NoRecursion); + } +} diff --git a/src/Sdk/DTLogging/Logging/ValueEncoders.cs b/src/Sdk/DTLogging/Logging/ValueEncoders.cs new file mode 100644 index 00000000000..2e9b04145e7 --- /dev/null +++ b/src/Sdk/DTLogging/Logging/ValueEncoders.cs @@ -0,0 +1,116 @@ +using System; +using System.ComponentModel; +using System.Security; +using System.Text; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Logging +{ + [EditorBrowsable(EditorBrowsableState.Never)] + 
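A minimal end-to-end sketch of the SecretMasker above, using only types from this patch plus the base class library; the lambda stands in for the encoders defined in the next file. AddValue and AddValueEncoder can be called in either order, since each back-fills the combinations the other has already registered.

using System;
using System.Text;
using GitHub.DistributedTask.Logging;

internal static class SecretMaskerSketch
{
    private static void Main()
    {
        using var masker = new SecretMasker();

        // Register a base64 encoder and a raw value; the encoded form is registered as well.
        masker.AddValueEncoder(value => Convert.ToBase64String(Encoding.UTF8.GetBytes(value)));
        masker.AddValue("hunter2");                              // hypothetical secret

        Console.WriteLine(masker.MaskSecrets("password=hunter2"));
        // -> password=***

        Console.WriteLine(masker.MaskSecrets(
            "header=" + Convert.ToBase64String(Encoding.UTF8.GetBytes("hunter2"))));
        // -> header=***

        // Overlapping hits are merged into a single "***" replacement.
        masker.AddValue("ter2 and mo");
        Console.WriteLine(masker.MaskSecrets("say hunter2 and more"));
        // -> say ***re
    }
}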
public delegate String ValueEncoder(String value); + + [EditorBrowsable(EditorBrowsableState.Never)] + public static class ValueEncoders + { + public static String Base64StringEscape(String value) + { + return Convert.ToBase64String(Encoding.UTF8.GetBytes(value)); + } + + // Base64 is 6 bytes -> char + // When end user doing somthing like base64(user:password) + // The length of the leading content will cause different base64 encoding result on the password + // So we add base64(value - 1/2/3/4/5 bytes) as secret as well. + public static String Base64StringEscapeShift1(String value) + { + return Base64StringEscapeShift(value, 1); + } + + public static String Base64StringEscapeShift2(String value) + { + return Base64StringEscapeShift(value, 2); + } + + public static String Base64StringEscapeShift3(String value) + { + return Base64StringEscapeShift(value, 3); + } + + public static String Base64StringEscapeShift4(String value) + { + return Base64StringEscapeShift(value, 4); + } + + public static String Base64StringEscapeShift5(String value) + { + return Base64StringEscapeShift(value, 5); + } + + public static String ExpressionStringEscape(String value) + { + return Expressions.ExpressionUtil.StringEscape(value); + } + + public static String JsonStringEscape(String value) + { + // Convert to a JSON string and then remove the leading/trailing double-quote. + String jsonString = JsonConvert.ToString(value); + String jsonEscapedValue = jsonString.Substring(startIndex: 1, length: jsonString.Length - 2); + return jsonEscapedValue; + } + + public static String UriDataEscape(String value) + { + return UriDataEscape(value, 65519); + } + + public static String XmlDataEscape(String value) + { + return SecurityElement.Escape(value); + } + + private static string Base64StringEscapeShift(String value, int shift) + { + var bytes = Encoding.UTF8.GetBytes(value); + if (bytes.Length > shift) + { + var shiftArray = new byte[bytes.Length - shift]; + Array.Copy(bytes, shift, shiftArray, 0, bytes.Length - shift); + return Convert.ToBase64String(shiftArray); + } + else + { + return Convert.ToBase64String(bytes); + } + } + + private static String UriDataEscape( + String value, + Int32 maxSegmentSize) + { + if (value.Length <= maxSegmentSize) + { + return Uri.EscapeDataString(value); + } + + // Workaround size limitation in Uri.EscapeDataString + var result = new StringBuilder(); + var i = 0; + do + { + var length = Math.Min(value.Length - i, maxSegmentSize); + + if (Char.IsHighSurrogate(value[i + length - 1]) && length > 1) + { + length--; + } + + result.Append(Uri.EscapeDataString(value.Substring(i, length))); + i += length; + } + while (i < value.Length); + + return result.ToString(); + } + } +} diff --git a/src/Sdk/DTLogging/Logging/ValueSecret.cs b/src/Sdk/DTLogging/Logging/ValueSecret.cs new file mode 100644 index 00000000000..8b7f6d6716b --- /dev/null +++ b/src/Sdk/DTLogging/Logging/ValueSecret.cs @@ -0,0 +1,48 @@ +using System; +using System.Collections.Generic; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Logging +{ + internal sealed class ValueSecret : ISecret + { + public ValueSecret(String value) + { + ArgumentUtility.CheckStringForNullOrEmpty(value, nameof(value)); + m_value = value; + } + + public override Boolean Equals(Object obj) + { + var item = obj as ValueSecret; + if (item == null) + { + return false; + } + return String.Equals(m_value, item.m_value, StringComparison.Ordinal); + } + + public override Int32 GetHashCode() => m_value.GetHashCode(); + + public IEnumerable 
GetPositions(String input) + { + if (!String.IsNullOrEmpty(input) && !String.IsNullOrEmpty(m_value)) + { + Int32 startIndex = 0; + while (startIndex > -1 && + startIndex < input.Length && + input.Length - startIndex >= m_value.Length) // remaining substring longer than secret value + { + startIndex = input.IndexOf(m_value, startIndex, StringComparison.Ordinal); + if (startIndex > -1) + { + yield return new ReplacementPosition(startIndex, m_value.Length); + ++startIndex; + } + } + } + } + + internal readonly String m_value; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/ContextValueNode.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/ContextValueNode.cs new file mode 100644 index 00000000000..98bc3e9baa8 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/ContextValueNode.cs @@ -0,0 +1,19 @@ +using System; +using GitHub.DistributedTask.Expressions2.Sdk; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + /// + /// This expression node retrieves a user-defined named-value. This is used during expression evaluation. + /// + internal sealed class ContextValueNode : NamedValue + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + return (context.State as TemplateContext).ExpressionValues[Name]; + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/EmptyTraceWriter.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/EmptyTraceWriter.cs new file mode 100644 index 00000000000..db5134f5906 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/EmptyTraceWriter.cs @@ -0,0 +1,27 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class EmptyTraceWriter : ITraceWriter + { + public void Error( + String format, + params Object[] args) + { + } + + public void Info( + String format, + params Object[] args) + { + } + + public void Verbose( + String format, + params Object[] args) + { + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/ExpressionTraceWriter.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/ExpressionTraceWriter.cs new file mode 100644 index 00000000000..596f5f9467a --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/ExpressionTraceWriter.cs @@ -0,0 +1,27 @@ +using System; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + /// + /// Wraps an ITraceWriter so it can be passed for expression evaluation. + /// + internal sealed class ExpressionTraceWriter : DistributedTask.Expressions2.ITraceWriter + { + public ExpressionTraceWriter(ITraceWriter trace) + { + m_trace = trace; + } + + public void Info(String message) + { + m_trace.Info("{0}", message); + } + + public void Verbose(String message) + { + m_trace.Verbose("{0}", message); + } + + private readonly ITraceWriter m_trace; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/IObjectReader.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/IObjectReader.cs new file mode 100644 index 00000000000..4f09806a1ca --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/IObjectReader.cs @@ -0,0 +1,26 @@ +using System; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + /// + /// Interface for reading a source object (or file). + /// This interface is used by TemplateReader to build a TemplateToken DOM. 
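Stepping back to the Base64StringEscapeShift* encoders defined above, here is a worked example of why the shifted variants exist. A prefix such as "user:" changes the secret's 3-byte alignment inside the combined base64 string, so the plain base64 of the secret usually does not appear as a substring (unless the prefix length is a multiple of three), while one of the shifted encodings does. The password "hunter2" below is hypothetical.

using System;
using System.Text;
using GitHub.DistributedTask.Logging;

internal static class Base64ShiftSketch
{
    private static void Main()
    {
        var secret = "hunter2";
        var embedded = Convert.ToBase64String(
            Encoding.UTF8.GetBytes("user:" + secret));           // "dXNlcjpodW50ZXIy"

        Console.WriteLine(embedded.Contains(
            ValueEncoders.Base64StringEscape(secret)));          // False: "aHVudGVyMg==" is not a substring here
        Console.WriteLine(embedded.Contains(
            ValueEncoders.Base64StringEscapeShift1(secret)));    // True: base64("unter2") = "dW50ZXIy" appears verbatim

        // Registering the shifted encoders lets the masker catch base64(user:password) in logs;
        // the prefix bytes that are not part of the secret remain visible.
        using var masker = new SecretMasker();
        masker.AddValueEncoder(ValueEncoders.Base64StringEscape);
        masker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift1);
        masker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift2);
        masker.AddValue(secret);

        Console.WriteLine(masker.MaskSecrets("auth=" + embedded));
        // -> auth=dXNlcjpo***
    }
}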
+ /// + internal interface IObjectReader + { + Boolean AllowLiteral(out LiteralToken token); + + Boolean AllowSequenceStart(out SequenceToken token); + + Boolean AllowSequenceEnd(); + + Boolean AllowMappingStart(out MappingToken token); + + Boolean AllowMappingEnd(); + + void ValidateStart(); + + void ValidateEnd(); + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/IObjectWriter.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/IObjectWriter.cs new file mode 100644 index 00000000000..c53fcb1e4ed --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/IObjectWriter.cs @@ -0,0 +1,31 @@ +using System; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + /// + /// Interface for building an object. This interface is used by + /// TemplateWriter to convert a TemplateToken DOM to another format. + /// + internal interface IObjectWriter + { + void WriteNull(); + + void WriteBoolean(Boolean value); + + void WriteNumber(Double value); + + void WriteString(String value); + + void WriteSequenceStart(); + + void WriteSequenceEnd(); + + void WriteMappingStart(); + + void WriteMappingEnd(); + + void WriteStart(); + + void WriteEnd(); + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/ITraceWriter.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/ITraceWriter.cs new file mode 100644 index 00000000000..290e1b1ec23 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/ITraceWriter.cs @@ -0,0 +1,21 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface ITraceWriter + { + void Error( + String format, + params Object[] args); + + void Info( + String format, + params Object[] args); + + void Verbose( + String format, + params Object[] args); + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/ITraceWriterExtensions.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/ITraceWriterExtensions.cs new file mode 100644 index 00000000000..d6c274f8a9c --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/ITraceWriterExtensions.cs @@ -0,0 +1,10 @@ +namespace GitHub.DistributedTask.ObjectTemplating +{ + internal static class ITraceWriterExtensions + { + internal static DistributedTask.Expressions2.ITraceWriter ToExpressionTraceWriter(this ITraceWriter trace) + { + return new ExpressionTraceWriter(trace); + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/BooleanDefinition.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/BooleanDefinition.cs new file mode 100644 index 00000000000..cd34d9f8433 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/BooleanDefinition.cs @@ -0,0 +1,54 @@ +using System; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ + internal sealed class BooleanDefinition : ScalarDefinition + { + internal BooleanDefinition() + { + } + + internal BooleanDefinition(MappingToken definition) + : base(definition) + { + foreach (var definitionPair in definition) + { + var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key"); + switch (definitionKey.Value) + { + case TemplateConstants.Boolean: + var mapping = definitionPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.Boolean}"); + foreach (var mappingPair in mapping) + { + var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Boolean} key"); + switch 
(mappingKey.Value) + { + default: + mappingKey.AssertUnexpectedValue($"{TemplateConstants.Definition} {TemplateConstants.Boolean} key"); + break; + } + } + break; + + default: + definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key"); + break; + } + } + } + + internal override DefinitionType DefinitionType => DefinitionType.Boolean; + + internal override Boolean IsMatch(LiteralToken literal) + { + return literal is BooleanToken; + } + + internal override void Validate( + TemplateSchema schema, + String name) + { + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/Definition.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/Definition.cs new file mode 100644 index 00000000000..259724c2d76 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/Definition.cs @@ -0,0 +1,49 @@ +using System; +using System.Linq; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ + /// + /// Defines the allowable schema for a user defined type + /// + internal abstract class Definition + { + protected Definition() + { + } + + protected Definition(MappingToken definition) + { + for (var i = 0; i < definition.Count; ) + { + var definitionKey = definition[i].Key.AssertString($"{TemplateConstants.Definition} key"); + if (String.Equals(definitionKey.Value, TemplateConstants.Context, StringComparison.Ordinal)) + { + var context = definition[i].Value.AssertSequence($"{TemplateConstants.Context}"); + definition.RemoveAt(i); + Context = context + .Select(x => x.AssertString($"{TemplateConstants.Context} item").Value) + .Distinct() + .ToArray(); + } + else if (String.Equals(definitionKey.Value, TemplateConstants.Description, StringComparison.Ordinal)) + { + definition.RemoveAt(i); + } + else + { + i++; + } + } + } + + internal abstract DefinitionType DefinitionType { get; } + + internal String[] Context { get; private set; } = new String[0]; + + internal abstract void Validate( + TemplateSchema schema, + String name); + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/DefinitionType.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/DefinitionType.cs new file mode 100644 index 00000000000..b35ee7ae016 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/DefinitionType.cs @@ -0,0 +1,13 @@ +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ + internal enum DefinitionType + { + Null, + Boolean, + Number, + String, + Sequence, + Mapping, + OneOf, + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/MappingDefinition.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/MappingDefinition.cs new file mode 100644 index 00000000000..8e43e53edd3 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/MappingDefinition.cs @@ -0,0 +1,136 @@ +using System; +using System.Collections.Generic; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ + internal sealed class MappingDefinition : Definition + { + internal MappingDefinition() + { + } + + internal MappingDefinition(MappingToken definition) + : base(definition) + { + foreach (var definitionPair in definition) + { + var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key"); + switch (definitionKey.Value) + { + case TemplateConstants.Mapping: + var mapping = definitionPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.Mapping}"); + foreach (var 
mappingPair in mapping) + { + var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} key"); + switch (mappingKey.Value) + { + case TemplateConstants.Properties: + var properties = mappingPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.Properties}"); + foreach (var propertiesPair in properties) + { + var propertyName = propertiesPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.Properties} key"); + var propertyValue = propertiesPair.Value.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.Properties} value"); + Properties.Add(propertyName.Value, new PropertyValue(propertyValue.Value)); + } + break; + + case TemplateConstants.LooseKeyType: + var looseKeyType = mappingPair.Value.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.LooseKeyType}"); + LooseKeyType = looseKeyType.Value; + break; + + case TemplateConstants.LooseValueType: + var looseValueType = mappingPair.Value.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.LooseValueType}"); + LooseValueType = looseValueType.Value; + break; + + default: + definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key"); + break; + } + } + break; + + default: + definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key"); + break; + } + } + } + + internal override DefinitionType DefinitionType => DefinitionType.Mapping; + + internal String Inherits { get; set; } + + internal String LooseKeyType { get; set; } + + internal String LooseValueType { get; set; } + + internal Dictionary Properties { get; } = new Dictionary(StringComparer.Ordinal); + + internal override void Validate( + TemplateSchema schema, + String name) + { + // Lookup loose key type + if (!String.IsNullOrEmpty(LooseKeyType)) + { + schema.GetDefinition(LooseKeyType); + + // Lookup loose value type + if (!String.IsNullOrEmpty(LooseValueType)) + { + schema.GetDefinition(LooseValueType); + } + else + { + throw new ArgumentException($"Property '{TemplateConstants.LooseKeyType}' is defined but '{TemplateConstants.LooseValueType}' is not defined"); + } + } + // Otherwise validate loose value type not be defined + else if (!String.IsNullOrEmpty(LooseValueType)) + { + throw new ArgumentException($"Property '{TemplateConstants.LooseValueType}' is defined but '{TemplateConstants.LooseKeyType}' is not defined"); + } + + // Lookup each property + foreach (var property in Properties.Values) + { + schema.GetDefinition(property.Type); + } + + if (!String.IsNullOrEmpty(Inherits)) + { + var inherited = schema.GetDefinition(Inherits); + + if (inherited.Context.Length > 0) + { + throw new NotSupportedException($"Property '{TemplateConstants.Context}' is not supported on inhertied definitions"); + } + + if (inherited.DefinitionType != DefinitionType.Mapping) + { + throw new NotSupportedException($"Expected structure of inherited definition to match. 
Actual '{inherited.DefinitionType}'"); + } + + var inheritedMapping = inherited as MappingDefinition; + + if (!String.IsNullOrEmpty(inheritedMapping.Inherits)) + { + throw new NotSupportedException($"Property '{TemplateConstants.Inherits}' is not supported on inherited definition"); + } + + if (!String.IsNullOrEmpty(inheritedMapping.LooseKeyType)) + { + throw new NotSupportedException($"Property '{TemplateConstants.LooseKeyType}' is not supported on inherited definition"); + } + + if (!String.IsNullOrEmpty(inheritedMapping.LooseValueType)) + { + throw new NotSupportedException($"Property '{TemplateConstants.LooseValueType}' is not supported on inherited definition"); + } + } + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/NullDefinition.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/NullDefinition.cs new file mode 100644 index 00000000000..0c7c862036a --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/NullDefinition.cs @@ -0,0 +1,54 @@ +using System; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ + internal sealed class NullDefinition : ScalarDefinition + { + internal NullDefinition() + { + } + + internal NullDefinition(MappingToken definition) + : base(definition) + { + foreach (var definitionPair in definition) + { + var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key"); + switch (definitionKey.Value) + { + case TemplateConstants.Null: + var mapping = definitionPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.Null}"); + foreach (var mappingPair in mapping) + { + var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Null} key"); + switch (mappingKey.Value) + { + default: + mappingKey.AssertUnexpectedValue($"{TemplateConstants.Definition} {TemplateConstants.Null} key"); + break; + } + } + break; + + default: + definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key"); + break; + } + } + } + + internal override DefinitionType DefinitionType => DefinitionType.Null; + + internal override Boolean IsMatch(LiteralToken literal) + { + return literal is NullToken; + } + + internal override void Validate( + TemplateSchema schema, + String name) + { + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/NumberDefinition.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/NumberDefinition.cs new file mode 100644 index 00000000000..abf2e13cfd3 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/NumberDefinition.cs @@ -0,0 +1,54 @@ +using System; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ + internal sealed class NumberDefinition : ScalarDefinition + { + internal NumberDefinition() + { + } + + internal NumberDefinition(MappingToken definition) + : base(definition) + { + foreach (var definitionPair in definition) + { + var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key"); + switch (definitionKey.Value) + { + case TemplateConstants.Number: + var mapping = definitionPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.Number}"); + foreach (var mappingPair in mapping) + { + var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Number} key"); + switch (mappingKey.Value) + { + default: + 
mappingKey.AssertUnexpectedValue($"{TemplateConstants.Definition} {TemplateConstants.Number} key"); + break; + } + } + break; + + default: + definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key"); + break; + } + } + } + + internal override DefinitionType DefinitionType => DefinitionType.Number; + + internal override Boolean IsMatch(LiteralToken literal) + { + return literal is NumberToken; + } + + internal override void Validate( + TemplateSchema schema, + String name) + { + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/OneOfDefinition.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/OneOfDefinition.cs new file mode 100644 index 00000000000..200933ebf6c --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/OneOfDefinition.cs @@ -0,0 +1,209 @@ +using System; +using System.Collections.Generic; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ + /// + /// Must resolve to exactly one of the referenced definitions + /// + internal sealed class OneOfDefinition : Definition + { + internal OneOfDefinition() + { + } + + internal OneOfDefinition(MappingToken definition) + : base(definition) + { + foreach (var definitionPair in definition) + { + var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key"); + switch (definitionKey.Value) + { + case TemplateConstants.OneOf: + var oneOf = definitionPair.Value.AssertSequence(TemplateConstants.OneOf); + foreach (var oneOfItem in oneOf) + { + var reference = oneOfItem.AssertString(TemplateConstants.OneOf); + OneOf.Add(reference.Value); + } + break; + + default: + definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key"); + break; + } + } + } + + internal override DefinitionType DefinitionType => DefinitionType.Mapping; + + internal List OneOf { get; } = new List(); + + internal override void Validate( + TemplateSchema schema, + String name) + { + if (OneOf.Count == 0) + { + throw new ArgumentException($"'{name}' does not contain any references"); + } + + var foundLooseKeyType = false; + var mappingDefinitions = default(List); + var sequenceDefinition = default(SequenceDefinition); + var nullDefinition = default(NullDefinition); + var booleanDefinition = default(BooleanDefinition); + var numberDefinition = default(NumberDefinition); + var stringDefinitions = default(List); + + foreach (var nestedType in OneOf) + { + var nestedDefinition = schema.GetDefinition(nestedType); + + if (nestedDefinition.Context.Length > 0) + { + throw new ArgumentException($"'{name}' is a one-of definition and references another definition that defines context. 
This is currently not supported."); + } + + if (nestedDefinition is MappingDefinition mappingDefinition) + { + if (mappingDefinitions == null) + { + mappingDefinitions = new List(); + } + + mappingDefinitions.Add(mappingDefinition); + + if (!String.IsNullOrEmpty(mappingDefinition.LooseKeyType)) + { + foundLooseKeyType = true; + } + } + else if (nestedDefinition is SequenceDefinition s) + { + // Multiple sequence definitions not allowed + if (sequenceDefinition != null) + { + throw new ArgumentException($"'{name}' refers to more than one '{TemplateConstants.Sequence}'"); + } + + sequenceDefinition = s; + } + else if (nestedDefinition is NullDefinition n) + { + // Multiple sequence definitions not allowed + if (nullDefinition != null) + { + throw new ArgumentException($"'{name}' refers to more than one '{TemplateConstants.Null}'"); + } + + nullDefinition = n; + } + else if (nestedDefinition is BooleanDefinition b) + { + // Multiple boolean definitions not allowed + if (booleanDefinition != null) + { + throw new ArgumentException($"'{name}' refers to more than one '{TemplateConstants.Boolean}'"); + } + + booleanDefinition = b; + } + else if (nestedDefinition is NumberDefinition num) + { + // Multiple number definitions not allowed + if (numberDefinition != null) + { + throw new ArgumentException($"'{name}' refers to more than one '{TemplateConstants.Number}'"); + } + + numberDefinition = num; + } + else if (nestedDefinition is StringDefinition stringDefinition) + { + // First string definition + if (stringDefinitions == null) + { + stringDefinitions = new List(); + } + // Multiple string definitions, all must be 'Constant' + else if ((stringDefinitions.Count == 1 && String.IsNullOrEmpty(stringDefinitions[0].Constant)) + || String.IsNullOrEmpty(stringDefinition.Constant)) + { + throw new ArgumentException($"'{name}' refers to more than one '{TemplateConstants.Scalar}', but some do not set '{TemplateConstants.Constant}'"); + } + + stringDefinitions.Add(stringDefinition); + } + else + { + throw new ArgumentException($"'{name}' refers to a '{nestedDefinition.DefinitionType}' definition"); + } + } + + if (mappingDefinitions?.Count > 1) + { + if (foundLooseKeyType) + { + throw new ArgumentException($"'{name}' refers to two mappings that both set '{TemplateConstants.LooseKeyType}'"); + } + + var seenProperties = new Dictionary(StringComparer.Ordinal); + + foreach (var mappingDefinition in mappingDefinitions) + { + foreach (var newProperty in GetMergedProperties(schema, mappingDefinition)) + { + // Already seen + if (seenProperties.TryGetValue(newProperty.Key, out PropertyValue existingProperty)) + { + // Types match + if (String.Equals(existingProperty.Type, newProperty.Value.Type, StringComparison.Ordinal)) + { + continue; + } + + // Collision + throw new ArgumentException($"'{name}' contains two mappings with the same property, but each refers to a different type. 
All matching properties must refer to the same type."); + } + // New + else + { + seenProperties.Add(newProperty.Key, newProperty.Value); + } + } + } + } + } + + private static IEnumerable> GetMergedProperties( + TemplateSchema schema, + MappingDefinition mapping) + { + foreach (var property in mapping.Properties) + { + yield return property; + } + + if (!String.IsNullOrEmpty(mapping.Inherits)) + { + var inherited = schema.GetDefinition(mapping.Inherits) as MappingDefinition; + + if (!String.IsNullOrEmpty(inherited.Inherits)) + { + throw new NotSupportedException("Multiple levels of inheritance is not supported"); + } + + foreach (var property in inherited.Properties) + { + if (!mapping.Properties.ContainsKey(property.Key)) + { + yield return property; + } + } + } + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/PropertyValue.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/PropertyValue.cs new file mode 100644 index 00000000000..5a95b0171df --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/PropertyValue.cs @@ -0,0 +1,18 @@ +using System; + +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ + internal sealed class PropertyValue + { + internal PropertyValue() + { + } + + internal PropertyValue(String type) + { + Type = type; + } + + internal String Type { get; set; } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/ScalarDefinition.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/ScalarDefinition.cs new file mode 100644 index 00000000000..9e3fddaa807 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/ScalarDefinition.cs @@ -0,0 +1,19 @@ +using System; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ + internal abstract class ScalarDefinition : Definition + { + internal ScalarDefinition() + { + } + + internal ScalarDefinition(MappingToken definition) + : base(definition) + { + } + + internal abstract Boolean IsMatch(LiteralToken literal); + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/SequenceDefinition.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/SequenceDefinition.cs new file mode 100644 index 00000000000..ec1d5fcfeac --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/SequenceDefinition.cs @@ -0,0 +1,64 @@ +using System; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ + internal sealed class SequenceDefinition : Definition + { + internal SequenceDefinition() + { + } + + internal SequenceDefinition(MappingToken definition) + : base(definition) + { + foreach (var definitionPair in definition) + { + var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key"); + + switch (definitionKey.Value) + { + case TemplateConstants.Sequence: + var mapping = definitionPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.Sequence}"); + foreach (var mappingPair in mapping) + { + var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Sequence} key"); + switch (mappingKey.Value) + { + case TemplateConstants.ItemType: + var itemType = mappingPair.Value.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Sequence} {TemplateConstants.ItemType}"); + ItemType = itemType.Value; + break; + + default: + mappingKey.AssertUnexpectedValue($"{TemplateConstants.Definition} {TemplateConstants.Sequence} key"); + 
break; + } + } + break; + + default: + definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key"); + break; + } + } + } + + internal override DefinitionType DefinitionType => DefinitionType.Sequence; + + internal String ItemType { get; set; } + + internal override void Validate( + TemplateSchema schema, + String name) + { + if (String.IsNullOrEmpty(ItemType)) + { + throw new ArgumentException($"'{name}' does not define '{TemplateConstants.ItemType}'"); + } + + // Lookup item type + schema.GetDefinition(ItemType); + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/StringDefinition.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/StringDefinition.cs new file mode 100644 index 00000000000..7d34a3c6b31 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/StringDefinition.cs @@ -0,0 +1,104 @@ +using System; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ + internal sealed class StringDefinition : ScalarDefinition + { + internal StringDefinition() + { + } + + internal StringDefinition(MappingToken definition) + : base(definition) + { + foreach (var definitionPair in definition) + { + var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key"); + switch (definitionKey.Value) + { + case TemplateConstants.String: + var mapping = definitionPair.Value.AssertMapping($"{TemplateConstants.Definition} {TemplateConstants.String}"); + foreach (var mappingPair in mapping) + { + var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.String} key"); + switch (mappingKey.Value) + { + case TemplateConstants.Constant: + var constantStringToken = mappingPair.Value.AssertString($"{TemplateConstants.Definition} {TemplateConstants.String} {TemplateConstants.Constant}"); + Constant = constantStringToken.Value; + break; + + case TemplateConstants.IgnoreCase: + var ignoreCaseBooleanToken = mappingPair.Value.AssertBoolean($"{TemplateConstants.Definition} {TemplateConstants.String} {TemplateConstants.IgnoreCase}"); + IgnoreCase = ignoreCaseBooleanToken.Value; + break; + + case TemplateConstants.RequireNonEmpty: + var requireNonEmptyBooleanToken = mappingPair.Value.AssertBoolean($"{TemplateConstants.Definition} {TemplateConstants.String} {TemplateConstants.RequireNonEmpty}"); + RequireNonEmpty = requireNonEmptyBooleanToken.Value; + break; + + default: + mappingKey.AssertUnexpectedValue($"{TemplateConstants.Definition} {TemplateConstants.String} key"); + break; + } + } + break; + + default: + definitionKey.AssertUnexpectedValue($"{TemplateConstants.Definition} key"); + break; + } + } + } + + internal override DefinitionType DefinitionType => DefinitionType.String; + + internal String Constant { get; set; } + + internal Boolean IgnoreCase { get; set; } + + internal Boolean RequireNonEmpty { get; set; } + + internal override Boolean IsMatch(LiteralToken literal) + { + if (literal is StringToken str) + { + var value = str.Value; + if (!String.IsNullOrEmpty(Constant)) + { + var comparison = IgnoreCase ? 
StringComparison.OrdinalIgnoreCase : StringComparison.Ordinal; + + if (String.Equals(Constant, value, comparison)) + { + return true; + } + } + else if (RequireNonEmpty) + { + if (!String.IsNullOrEmpty(value)) + { + return true; + } + } + else + { + return true; + } + } + + return false; + } + + internal override void Validate( + TemplateSchema schema, + String name) + { + if (!String.IsNullOrEmpty(Constant) && RequireNonEmpty) + { + throw new ArgumentException($"Properties '{TemplateConstants.Constant}' and '{TemplateConstants.RequireNonEmpty}' cannot both be set"); + } + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/TemplateSchema.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/TemplateSchema.cs new file mode 100644 index 00000000000..9ac6b2453e9 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Schema/TemplateSchema.cs @@ -0,0 +1,480 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Text.RegularExpressions; +using System.Threading; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ + /// + /// This models the root schema object and contains definitions + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class TemplateSchema + { + internal TemplateSchema() + : this(null) + { + } + + private TemplateSchema(MappingToken mapping) + { + // Add built-in type: null + var nullDefinition = new NullDefinition(); + Definitions.Add(TemplateConstants.Null, nullDefinition); + + // Add built-in type: boolean + var booleanDefinition = new BooleanDefinition(); + Definitions.Add(TemplateConstants.Boolean, booleanDefinition); + + // Add built-in type: number + var numberDefinition = new NumberDefinition(); + Definitions.Add(TemplateConstants.Number, numberDefinition); + + // Add built-in type: string + var stringDefinition = new StringDefinition(); + Definitions.Add(TemplateConstants.String, stringDefinition); + + // Add built-in type: sequence + var sequenceDefinition = new SequenceDefinition { ItemType = TemplateConstants.Any }; + Definitions.Add(TemplateConstants.Sequence, sequenceDefinition); + + // Add built-in type: mapping + var mappingDefinition = new MappingDefinition { LooseKeyType = TemplateConstants.String, LooseValueType = TemplateConstants.Any }; + Definitions.Add(TemplateConstants.Mapping, mappingDefinition); + + // Add built-in type: any + var anyDefinition = new OneOfDefinition(); + anyDefinition.OneOf.Add(TemplateConstants.Null); + anyDefinition.OneOf.Add(TemplateConstants.Boolean); + anyDefinition.OneOf.Add(TemplateConstants.Number); + anyDefinition.OneOf.Add(TemplateConstants.String); + anyDefinition.OneOf.Add(TemplateConstants.Sequence); + anyDefinition.OneOf.Add(TemplateConstants.Mapping); + Definitions.Add(TemplateConstants.Any, anyDefinition); + + if (mapping != null) + { + foreach (var pair in mapping) + { + var key = pair.Key.AssertString($"{TemplateConstants.TemplateSchema} key"); + switch (key.Value) + { + case TemplateConstants.Version: + var version = pair.Value.AssertString(TemplateConstants.Version); + Version = version.Value; + break; + + case TemplateConstants.Definitions: + var definitions = pair.Value.AssertMapping(TemplateConstants.Definitions); + foreach (var definitionsPair in definitions) + { + var definitionsKey = definitionsPair.Key.AssertString($"{TemplateConstants.Definitions} key"); + var definitionsValue = definitionsPair.Value.AssertMapping(TemplateConstants.Definition); + var 
definition = default(Definition); + foreach (var definitionPair in definitionsValue) + { + var definitionKey = definitionPair.Key.AssertString($"{TemplateConstants.Definition} key"); + switch (definitionKey.Value) + { + case TemplateConstants.Null: + definition = new NullDefinition(definitionsValue); + break; + + case TemplateConstants.Boolean: + definition = new BooleanDefinition(definitionsValue); + break; + + case TemplateConstants.Number: + definition = new NumberDefinition(definitionsValue); + break; + + case TemplateConstants.String: + definition = new StringDefinition(definitionsValue); + break; + + case TemplateConstants.Sequence: + definition = new SequenceDefinition(definitionsValue); + break; + + case TemplateConstants.Mapping: + definition = new MappingDefinition(definitionsValue); + break; + + case TemplateConstants.OneOf: + definition = new OneOfDefinition(definitionsValue); + break; + + case TemplateConstants.Context: + case TemplateConstants.Description: + continue; + + default: + definitionKey.AssertUnexpectedValue("definition mapping key"); // throws + break; + } + + break; + } + + if (definition == null) + { + throw new ArgumentException($"Unable to determine definition details. Specify the '{TemplateConstants.Structure}' property"); + } + + Definitions.Add(definitionsKey.Value, definition); + } + break; + + default: + key.AssertUnexpectedValue($"{TemplateConstants.TemplateSchema} key"); // throws + break; + } + } + } + } + + internal Dictionary Definitions { get; } = new Dictionary(StringComparer.Ordinal); + + internal String Version { get; } + + /// + /// Loads a user's schema file + /// + internal static TemplateSchema Load(IObjectReader objectReader) + { + var context = new TemplateContext + { + CancellationToken = CancellationToken.None, + Errors = new TemplateValidationErrors(maxErrors: 10, maxMessageLength: 500), + Memory = new TemplateMemory( + maxDepth: 50, + maxEvents: 1000000, // 1 million + maxBytes: 1024 * 1024), // 1 mb + TraceWriter = new EmptyTraceWriter(), + }; + + var value = TemplateReader.Read(context, TemplateConstants.TemplateSchema, objectReader, null, Schema, out _); + + if (context.Errors.Count > 0) + { + throw new TemplateValidationException(context.Errors); + } + + var mapping = value.AssertMapping(TemplateConstants.TemplateSchema); + var schema = new TemplateSchema(mapping); + schema.Validate(); + return schema; + } + + internal IEnumerable Get(Definition definition) + where T : Definition + { + if (definition is T match) + { + yield return match; + } + else if (definition is OneOfDefinition oneOf) + { + foreach (var reference in oneOf.OneOf) + { + var nestedDefinition = GetDefinition(reference); + if (nestedDefinition is T match2) + { + yield return match2; + } + } + } + } + + internal Definition GetDefinition(String type) + { + if (Definitions.TryGetValue(type, out Definition value)) + { + return value; + } + + throw new ArgumentException($"Schema definition '{type}' not found"); + } + + internal Boolean HasProperties(MappingDefinition definition) + { + for (int i = 0; i < 10; i++) + { + if (definition.Properties.Count > 0) + { + return true; + } + + if (String.IsNullOrEmpty(definition.Inherits)) + { + return false; + } + + definition = GetDefinition(definition.Inherits) as MappingDefinition; + } + + throw new InvalidOperationException("Inheritance depth exceeded 10"); + } + + internal Boolean TryGetProperty( + MappingDefinition definition, + String name, + out String type) + { + for (int i = 0; i < 10; i++) + { + if 
(definition.Properties.TryGetValue(name, out PropertyValue property)) + { + type = property.Type; + return true; + } + + if (String.IsNullOrEmpty(definition.Inherits)) + { + type = default; + return false; + } + + definition = GetDefinition(definition.Inherits) as MappingDefinition; + } + + throw new InvalidOperationException("Inheritance depth exceeded 10"); + } + + internal Boolean TryMatchKey( + List definitions, + String key, + out String valueType) + { + valueType = null; + + // Check for a matching well known property + var notFoundInSome = false; + for (var i = 0; i < definitions.Count; i++) + { + var definition = definitions[i]; + + if (TryGetProperty(definition, key, out String t)) + { + if (valueType == null) + { + valueType = t; + } + } + else + { + notFoundInSome = true; + } + } + + // Check if found + if (valueType != null) + { + // Filter the matched definitions if needed + if (notFoundInSome) + { + for (var i = 0; i < definitions.Count;) + { + if (TryGetProperty(definitions[i], key, out _)) + { + i++; + } + else + { + definitions.RemoveAt(i); + } + } + } + + return true; + } + + return false; + } + + /// + /// The built-in schema for reading schema files + /// + private static TemplateSchema Schema + { + get + { + if (s_schema == null) + { + var schema = new TemplateSchema(); + + StringDefinition stringDefinition; + SequenceDefinition sequenceDefinition; + MappingDefinition mappingDefinition; + OneOfDefinition oneOfDefinition; + + // template-schema + mappingDefinition = new MappingDefinition(); + mappingDefinition.Properties.Add(TemplateConstants.Version, new PropertyValue(TemplateConstants.NonEmptyString)); + mappingDefinition.Properties.Add(TemplateConstants.Definitions, new PropertyValue(TemplateConstants.Definitions)); + schema.Definitions.Add(TemplateConstants.TemplateSchema, mappingDefinition); + + // definitions + mappingDefinition = new MappingDefinition(); + mappingDefinition.LooseKeyType = TemplateConstants.NonEmptyString; + mappingDefinition.LooseValueType = TemplateConstants.Definition; + schema.Definitions.Add(TemplateConstants.Definitions, mappingDefinition); + + // definition + oneOfDefinition = new OneOfDefinition(); + oneOfDefinition.OneOf.Add(TemplateConstants.NullDefinition); + oneOfDefinition.OneOf.Add(TemplateConstants.BooleanDefinition); + oneOfDefinition.OneOf.Add(TemplateConstants.NumberDefinition); + oneOfDefinition.OneOf.Add(TemplateConstants.StringDefinition); + oneOfDefinition.OneOf.Add(TemplateConstants.SequenceDefinition); + oneOfDefinition.OneOf.Add(TemplateConstants.MappingDefinition); + oneOfDefinition.OneOf.Add(TemplateConstants.OneOfDefinition); + schema.Definitions.Add(TemplateConstants.Definition, oneOfDefinition); + + // null-definition + mappingDefinition = new MappingDefinition(); + mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String)); + mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString)); + mappingDefinition.Properties.Add(TemplateConstants.Null, new PropertyValue(TemplateConstants.NullDefinitionProperties)); + schema.Definitions.Add(TemplateConstants.NullDefinition, mappingDefinition); + + // null-definition-properties + mappingDefinition = new MappingDefinition(); + schema.Definitions.Add(TemplateConstants.NullDefinitionProperties, mappingDefinition); + + // boolean-definition + mappingDefinition = new MappingDefinition(); + mappingDefinition.Properties.Add(TemplateConstants.Description, new 
PropertyValue(TemplateConstants.String)); + mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString)); + mappingDefinition.Properties.Add(TemplateConstants.Boolean, new PropertyValue(TemplateConstants.BooleanDefinitionProperties)); + schema.Definitions.Add(TemplateConstants.BooleanDefinition, mappingDefinition); + + // boolean-definition-properties + mappingDefinition = new MappingDefinition(); + schema.Definitions.Add(TemplateConstants.BooleanDefinitionProperties, mappingDefinition); + + // number-definition + mappingDefinition = new MappingDefinition(); + mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String)); + mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString)); + mappingDefinition.Properties.Add(TemplateConstants.Number, new PropertyValue(TemplateConstants.NumberDefinitionProperties)); + schema.Definitions.Add(TemplateConstants.NumberDefinition, mappingDefinition); + + // number-definition-properties + mappingDefinition = new MappingDefinition(); + schema.Definitions.Add(TemplateConstants.NumberDefinitionProperties, mappingDefinition); + + // string-definition + mappingDefinition = new MappingDefinition(); + mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String)); + mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString)); + mappingDefinition.Properties.Add(TemplateConstants.String, new PropertyValue(TemplateConstants.StringDefinitionProperties)); + schema.Definitions.Add(TemplateConstants.StringDefinition, mappingDefinition); + + // string-definition-properties + mappingDefinition = new MappingDefinition(); + mappingDefinition.Properties.Add(TemplateConstants.Constant, new PropertyValue(TemplateConstants.NonEmptyString)); + mappingDefinition.Properties.Add(TemplateConstants.IgnoreCase, new PropertyValue(TemplateConstants.Boolean)); + mappingDefinition.Properties.Add(TemplateConstants.RequireNonEmpty, new PropertyValue(TemplateConstants.Boolean)); + schema.Definitions.Add(TemplateConstants.StringDefinitionProperties, mappingDefinition); + + // sequence-definition + mappingDefinition = new MappingDefinition(); + mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String)); + mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString)); + mappingDefinition.Properties.Add(TemplateConstants.Sequence, new PropertyValue(TemplateConstants.SequenceDefinitionProperties)); + schema.Definitions.Add(TemplateConstants.SequenceDefinition, mappingDefinition); + + // sequence-definition-properties + mappingDefinition = new MappingDefinition(); + mappingDefinition.Properties.Add(TemplateConstants.ItemType, new PropertyValue(TemplateConstants.NonEmptyString)); + schema.Definitions.Add(TemplateConstants.SequenceDefinitionProperties, mappingDefinition); + + // mapping-definition + mappingDefinition = new MappingDefinition(); + mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String)); + mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString)); + mappingDefinition.Properties.Add(TemplateConstants.Mapping, new PropertyValue(TemplateConstants.MappingDefinitionProperties)); + 
schema.Definitions.Add(TemplateConstants.MappingDefinition, mappingDefinition); + + // mapping-definition-properties + mappingDefinition = new MappingDefinition(); + mappingDefinition.Properties.Add(TemplateConstants.Properties, new PropertyValue(TemplateConstants.Properties)); + mappingDefinition.Properties.Add(TemplateConstants.LooseKeyType, new PropertyValue(TemplateConstants.NonEmptyString)); + mappingDefinition.Properties.Add(TemplateConstants.LooseValueType, new PropertyValue(TemplateConstants.NonEmptyString)); + schema.Definitions.Add(TemplateConstants.MappingDefinitionProperties, mappingDefinition); + + // properties + mappingDefinition = new MappingDefinition(); + mappingDefinition.LooseKeyType = TemplateConstants.NonEmptyString; + mappingDefinition.LooseValueType = TemplateConstants.NonEmptyString; + schema.Definitions.Add(TemplateConstants.Properties, mappingDefinition); + + // one-of-definition + mappingDefinition = new MappingDefinition(); + mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String)); + mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString)); + mappingDefinition.Properties.Add(TemplateConstants.OneOf, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString)); + schema.Definitions.Add(TemplateConstants.OneOfDefinition, mappingDefinition); + + // non-empty-string + stringDefinition = new StringDefinition(); + stringDefinition.RequireNonEmpty = true; + schema.Definitions.Add(TemplateConstants.NonEmptyString, stringDefinition); + + // sequence-of-non-empty-string + sequenceDefinition = new SequenceDefinition(); + sequenceDefinition.ItemType = TemplateConstants.NonEmptyString; + schema.Definitions.Add(TemplateConstants.SequenceOfNonEmptyString, sequenceDefinition); + + schema.Validate(); + + Interlocked.CompareExchange(ref s_schema, schema, null); + } + + return s_schema; + } + } + + private void Validate() + { + var oneOfPairs = new List>(); + + foreach (var pair in Definitions) + { + var name = pair.Key; + + if (!s_definitionNameRegex.IsMatch(name ?? 
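// A minimal sketch of the publish pattern the Schema getter above relies on:
// concurrent callers may each build a throwaway instance, but
// Interlocked.CompareExchange guarantees only the first published instance is
// ever observed, without taking a lock. The Cached<T> name is hypothetical.
using System.Threading;

static class Cached<T> where T : class, new()
{
    private static T s_instance;

    public static T Instance
    {
        get
        {
            if (s_instance == null)
            {
                var candidate = new T();                                      // may be built more than once
                Interlocked.CompareExchange(ref s_instance, candidate, null); // first writer wins
            }

            return s_instance;                                                // always the single published instance
        }
    }
}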
String.Empty)) + { + throw new ArgumentException($"Invalid definition name '{name}'"); + } + + var definition = pair.Value; + + // Delay validation for 'one-of' definitions + if (definition is OneOfDefinition oneOf) + { + oneOfPairs.Add(new KeyValuePair(name, oneOf)); + } + // Otherwise validate now + else + { + definition.Validate(this, name); + } + } + + // Validate 'one-of' definitions + foreach (var pair in oneOfPairs) + { + var name = pair.Key; + var oneOf = pair.Value; + oneOf.Validate(this, name); + } + } + + private static readonly Regex s_definitionNameRegex = new Regex("^[a-zA-Z_][a-zA-Z0-9_-]*$", RegexOptions.Compiled); + private static TemplateSchema s_schema; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateConstants.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateConstants.cs new file mode 100644 index 00000000000..72ebae5ab22 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateConstants.cs @@ -0,0 +1,56 @@ +using System; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + internal static class TemplateConstants + { + internal const String Any = "any"; + internal const String Boolean = "boolean"; + internal const String BooleanDefinition = "boolean-definition"; + internal const String BooleanDefinitionProperties = "boolean-definition-properties"; + internal const String CloseExpression = "}}"; + internal const String Constant = "constant"; + internal const String Context = "context"; + internal const String Definition = "definition"; + internal const String Definitions = "definitions"; + internal const String Description = "description"; + internal const String False = "false"; + internal const String FalseConstant = "false-constant"; + internal const String IgnoreCase = "ignore-case"; + internal const String Inherits = "inherits"; + internal const String InsertDirective = "insert"; + internal const String ItemType = "item-type"; + internal const String LooseKeyType = "loose-key-type"; + internal const String LooseValueType = "loose-value-type"; + internal const String Mapping = "mapping"; + internal const String MappingDefinition = "mapping-definition"; + internal const String MappingDefinitionProperties = "mapping-definition-properties"; + internal const String NonEmptyString = "non-empty-string"; + internal const String Null = "null"; + internal const String NullDefinition = "null-definition"; + internal const String NullDefinitionProperties = "null-definition-properties"; + internal const String Number = "number"; + internal const String NumberDefinition = "number-definition"; + internal const String NumberDefinitionProperties = "number-definition-properties"; + internal const String OneOf = "one-of"; + internal const String OneOfDefinition = "one-of-definition"; + internal const String OpenExpression = "${{"; + internal const String Properties = "properties"; + internal const String RequireNonEmpty = "require-non-empty"; + internal const String Scalar = "scalar"; + internal const String ScalarDefinition = "scalar-definition"; + internal const String ScalarDefinitionProperties = "scalar-definition-properties"; + internal const String Sequence = "sequence"; + internal const String SequenceDefinition = "sequence-definition"; + internal const String SequenceDefinitionProperties = "sequence-definition-properties"; + internal const String SequenceOfNonEmptyString = "sequence-of-non-empty-string"; + internal const String String = "string"; + internal const String StringDefinition = "string-definition"; + internal const 
String StringDefinitionProperties = "string-definition-properties"; + internal const String Structure = "structure"; + internal const String TemplateSchema = "template-schema"; + internal const String True = "true"; + internal const String TrueConstant = "true-constant"; + internal const String Version = "version"; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateContext.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateContext.cs new file mode 100644 index 00000000000..34ea450719c --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateContext.cs @@ -0,0 +1,231 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Threading; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.ObjectTemplating.Schema; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + /// + /// Context object that is flowed through while loading and evaluating object templates + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class TemplateContext + { + internal CancellationToken CancellationToken { get; set; } + + internal TemplateValidationErrors Errors + { + get + { + if (m_errors == null) + { + m_errors = new TemplateValidationErrors(); + } + + return m_errors; + } + + set + { + m_errors = value; + } + } + + /// + /// Available functions within expression contexts + /// + internal IList ExpressionFunctions + { + get + { + if (m_expressionFunctions == null) + { + m_expressionFunctions = new List(); + } + + return m_expressionFunctions; + } + } + + /// + /// Available values within expression contexts + /// + internal IDictionary ExpressionValues + { + get + { + if (m_expressionValues == null) + { + m_expressionValues = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_expressionValues; + } + } + + internal TemplateMemory Memory { get; set; } + + internal TemplateSchema Schema { get; set; } + + internal IDictionary State + { + get + { + if (m_state == null) + { + m_state = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_state; + } + } + + internal ITraceWriter TraceWriter { get; set; } + + private IDictionary FileIds + { + get + { + if (m_fileIds == null) + { + m_fileIds = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_fileIds; + } + set + { + m_fileIds = value; + } + } + + private List FileNames + { + get + { + if (m_fileNames == null) + { + m_fileNames = new List(); + } + + return m_fileNames; + } + set + { + m_fileNames = value; + } + } + + internal void Error(TemplateValidationError error) + { + Errors.Add(error); + TraceWriter.Error(error.Message); + } + + internal void Error( + TemplateToken value, + Exception ex) + { + Error(value?.FileId, value?.Line, value?.Column, ex); + } + + internal void Error( + Int32? fileId, + Int32? line, + Int32? column, + Exception ex) + { + var prefix = GetErrorPrefix(fileId, line, column); + Errors.Add(prefix, ex); + TraceWriter.Error(prefix, ex); + } + + internal void Error( + TemplateToken value, + String message) + { + Error(value?.FileId, value?.Line, value?.Column, message); + } + + internal void Error( + Int32? fileId, + Int32? line, + Int32? 
column, + String message) + { + var prefix = GetErrorPrefix(fileId, line, column); + if (!String.IsNullOrEmpty(prefix)) + { + message = $"{prefix} {message}"; + } + + Errors.Add(message); + TraceWriter.Error(message); + } + + internal INamedValueInfo[] GetExpressionNamedValues() + { + if (m_expressionValues?.Count > 0) + { + return m_expressionValues.Keys.Select(x => new NamedValueInfo(x)).ToArray(); + } + + return null; + } + + internal Int32 GetFileId(String file) + { + if (!FileIds.TryGetValue(file, out Int32 id)) + { + id = FileIds.Count + 1; + FileIds.Add(file, id); + FileNames.Add(file); + } + + return id; + } + + internal String GetFileName(Int32 fileId) + { + return FileNames[fileId - 1]; + } + + private String GetErrorPrefix( + Int32? fileId, + Int32? line, + Int32? column) + { + if (fileId != null) + { + var fileName = GetFileName(fileId.Value); + if (line != null && column != null) + { + return $"{fileName} {TemplateStrings.LineColumn(line, column)}:"; + } + else + { + return $"{fileName}:"; + } + } + else if (line != null && column != null) + { + return $"{TemplateStrings.LineColumn(line, column)}:"; + } + else + { + return String.Empty; + } + } + + private TemplateValidationErrors m_errors; + private IList m_expressionFunctions; + private IDictionary m_expressionValues; + private IDictionary m_fileIds; + private List m_fileNames; + private IDictionary m_state; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateEvaluator.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateEvaluator.cs new file mode 100644 index 00000000000..48670a9f3a6 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateEvaluator.cs @@ -0,0 +1,433 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using GitHub.DistributedTask.ObjectTemplating.Schema; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + /// + /// Expands expression tokens where the allowed context is available now. The allowed context is defined + /// within the schema. The available context is based on the ExpressionValues registered in the TemplateContext. + /// + internal partial class TemplateEvaluator + { + private TemplateEvaluator( + TemplateContext context, + TemplateToken template, + Int32 removeBytes) + { + m_context = context; + m_schema = context.Schema; + m_unraveler = new TemplateUnraveler(context, template, removeBytes); + } + + internal static TemplateToken Evaluate( + TemplateContext context, + String type, + TemplateToken template, + Int32 removeBytes, + Int32? 
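// A minimal sketch of the file-id bookkeeping GetFileId/GetFileName implement
// above: ids are handed out 1-based in first-seen order, repeated lookups are
// stable, and the list is the reverse mapping. The file names are hypothetical.
using System;
using System.Collections.Generic;

var fileIds = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
var fileNames = new List<string>();

int GetFileId(string file)
{
    if (!fileIds.TryGetValue(file, out int id))
    {
        id = fileIds.Count + 1;   // 1-based
        fileIds.Add(file, id);
        fileNames.Add(file);
    }

    return id;
}

// GetFileId(".github/workflows/ci.yml")  -> 1
// GetFileId("action.yml")                -> 2
// GetFileId(".github/workflows/ci.yml")  -> 1 (stable)
// fileNames[2 - 1]                       -> "action.yml"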
fileId, + Boolean omitHeader = false) + { + TemplateToken result; + + if (!omitHeader) + { + if (fileId != null) + { + context.TraceWriter.Info("{0}", $"Begin evaluating template '{context.GetFileName(fileId.Value)}'"); + } + else + { + context.TraceWriter.Info("{0}", "Begin evaluating template"); + } + } + + var evaluator = new TemplateEvaluator(context, template, removeBytes); + try + { + var availableContext = new HashSet(context.ExpressionValues.Keys); + var definitionInfo = new DefinitionInfo(context.Schema, type, availableContext); + result = evaluator.Evaluate(definitionInfo); + + if (result != null) + { + evaluator.m_unraveler.ReadEnd(); + } + } + catch (Exception ex) + { + context.Error(fileId, null, null, ex); + result = null; + } + + if (!omitHeader) + { + if (fileId != null) + { + context.TraceWriter.Info("{0}", $"Finished evaluating template '{context.GetFileName(fileId.Value)}'"); + } + else + { + context.TraceWriter.Info("{0}", "Finished evaluating template"); + } + } + + return result; + } + + private TemplateToken Evaluate(DefinitionInfo definition) + { + // Scalar + if (m_unraveler.AllowScalar(definition.Expand, out ScalarToken scalar)) + { + if (scalar is LiteralToken literal) + { + Validate(ref literal, definition); + return literal; + } + else + { + return scalar; + } + } + + // Sequence start + if (m_unraveler.AllowSequenceStart(definition.Expand, out SequenceToken sequence)) + { + var sequenceDefinition = definition.Get().FirstOrDefault(); + + // Legal + if (sequenceDefinition != null) + { + var itemDefinition = new DefinitionInfo(definition, sequenceDefinition.ItemType); + + // Add each item + while (!m_unraveler.AllowSequenceEnd(definition.Expand)) + { + var item = Evaluate(itemDefinition); + sequence.Add(item); + } + } + // Illegal + else + { + // Error + m_context.Error(sequence, TemplateStrings.UnexpectedSequenceStart()); + + // Skip each item + while (!m_unraveler.AllowSequenceEnd(expand: false)) + { + m_unraveler.SkipSequenceItem(); + } + } + + return sequence; + } + + // Mapping + if (m_unraveler.AllowMappingStart(definition.Expand, out MappingToken mapping)) + { + var mappingDefinitions = definition.Get().ToList(); + + // Legal + if (mappingDefinitions.Count > 0) + { + if (mappingDefinitions.Count > 1 || + m_schema.HasProperties(mappingDefinitions[0]) || + String.IsNullOrEmpty(mappingDefinitions[0].LooseKeyType)) + { + HandleMappingWithWellKnownProperties(definition, mappingDefinitions, mapping); + } + else + { + var keyDefinition = new DefinitionInfo(definition, mappingDefinitions[0].LooseKeyType); + var valueDefinition = new DefinitionInfo(definition, mappingDefinitions[0].LooseValueType); + HandleMappingWithAllLooseProperties(definition, keyDefinition, valueDefinition, mapping); + } + } + // Illegal + else + { + m_context.Error(mapping, TemplateStrings.UnexpectedMappingStart()); + + while (!m_unraveler.AllowMappingEnd(expand: false)) + { + m_unraveler.SkipMappingKey(); + m_unraveler.SkipMappingValue(); + } + } + + return mapping; + } + + throw new ArgumentException(TemplateStrings.ExpectedScalarSequenceOrMapping()); + } + + private void HandleMappingWithWellKnownProperties( + DefinitionInfo definition, + List mappingDefinitions, + MappingToken mapping) + { + // Check if loose properties are allowed + String looseKeyType = null; + String looseValueType = null; + DefinitionInfo? looseKeyDefinition = null; + DefinitionInfo? 
looseValueDefinition = null; + if (!String.IsNullOrEmpty(mappingDefinitions[0].LooseKeyType)) + { + looseKeyType = mappingDefinitions[0].LooseKeyType; + looseValueType = mappingDefinitions[0].LooseValueType; + } + + var keys = new HashSet(StringComparer.OrdinalIgnoreCase); + + while (m_unraveler.AllowScalar(definition.Expand, out ScalarToken nextKeyScalar)) + { + // Expression + if (nextKeyScalar is ExpressionToken) + { + var anyDefinition = new DefinitionInfo(definition, TemplateConstants.Any); + mapping.Add(nextKeyScalar, Evaluate(anyDefinition)); + continue; + } + + // Not a string, convert + if (!(nextKeyScalar is StringToken nextKey)) + { + nextKey = new StringToken(nextKeyScalar.FileId, nextKeyScalar.Line, nextKeyScalar.Column, nextKeyScalar.ToString()); + } + + // Duplicate + if (!keys.Add(nextKey.Value)) + { + m_context.Error(nextKey, TemplateStrings.ValueAlreadyDefined(nextKey.Value)); + m_unraveler.SkipMappingValue(); + continue; + } + + // Well known + if (m_schema.TryMatchKey(mappingDefinitions, nextKey.Value, out String nextValueType)) + { + var nextValueDefinition = new DefinitionInfo(definition, nextValueType); + var nextValue = Evaluate(nextValueDefinition); + mapping.Add(nextKey, nextValue); + continue; + } + + // Loose + if (looseKeyType != null) + { + if (looseKeyDefinition == null) + { + looseKeyDefinition = new DefinitionInfo(definition, looseKeyType); + looseValueDefinition = new DefinitionInfo(definition, looseValueType); + } + + Validate(nextKey, looseKeyDefinition.Value); + var nextValue = Evaluate(looseValueDefinition.Value); + mapping.Add(nextKey, nextValue); + continue; + } + + // Error + m_context.Error(nextKey, TemplateStrings.UnexpectedValue(nextKey.Value)); + m_unraveler.SkipMappingValue(); + } + + // Only one + if (mappingDefinitions.Count > 1) + { + var hitCount = new Dictionary(); + foreach (MappingDefinition mapdef in mappingDefinitions) + { + foreach (String key in mapdef.Properties.Keys) + { + if (!hitCount.TryGetValue(key, out Int32 value)) + { + hitCount.Add(key, 1); + } + else + { + hitCount[key] = value + 1; + } + } + } + + List nonDuplicates = new List(); + foreach (String key in hitCount.Keys) + { + if (hitCount[key] == 1) + { + nonDuplicates.Add(key); + } + } + nonDuplicates.Sort(); + + String listToDeDuplicate = String.Join(", ", nonDuplicates); + m_context.Error(mapping, TemplateStrings.UnableToDetermineOneOf(listToDeDuplicate)); + } + + m_unraveler.ReadMappingEnd(); + } + + private void HandleMappingWithAllLooseProperties( + DefinitionInfo mappingDefinition, + DefinitionInfo keyDefinition, + DefinitionInfo valueDefinition, + MappingToken mapping) + { + var keys = new HashSet(StringComparer.OrdinalIgnoreCase); + + while (m_unraveler.AllowScalar(mappingDefinition.Expand, out ScalarToken nextKeyScalar)) + { + // Expression + if (nextKeyScalar is ExpressionToken) + { + if (nextKeyScalar is BasicExpressionToken) + { + mapping.Add(nextKeyScalar, Evaluate(valueDefinition)); + } + else + { + var anyDefinition = new DefinitionInfo(mappingDefinition, TemplateConstants.Any); + mapping.Add(nextKeyScalar, Evaluate(anyDefinition)); + } + + continue; + } + + // Not a string + if (!(nextKeyScalar is StringToken nextKey)) + { + nextKey = new StringToken(nextKeyScalar.FileId, nextKeyScalar.Line, nextKeyScalar.Column, nextKeyScalar.ToString()); + } + + // Duplicate + if (!keys.Add(nextKey.Value)) + { + m_context.Error(nextKey, TemplateStrings.ValueAlreadyDefined(nextKey.Value)); + m_unraveler.SkipMappingValue(); + continue; + } + + // Validate + 
Validate(nextKey, keyDefinition); + + // Add the pair + var nextValue = Evaluate(valueDefinition); + mapping.Add(nextKey, nextValue); + } + + m_unraveler.ReadMappingEnd(); + } + + private void Validate( + StringToken stringToken, + DefinitionInfo definition) + { + var literal = stringToken as LiteralToken; + Validate(ref literal, definition); + } + + private void Validate( + ref LiteralToken literal, + DefinitionInfo definition) + { + // Legal + var literal2 = literal; + if (definition.Get().Any(x => x.IsMatch(literal2))) + { + return; + } + + // Not a string, convert + if (literal.Type != TokenType.String) + { + var stringToken = new StringToken(literal.FileId, literal.Line, literal.Column, literal.ToString()); + + // Legal + if (definition.Get().Any(x => x.IsMatch(stringToken))) + { + literal = stringToken; + return; + } + } + + // Illegal + m_context.Error(literal, TemplateStrings.UnexpectedValue(literal)); + } + + private void ValidateEnd() + { + m_unraveler.ReadEnd(); + } + + private struct DefinitionInfo + { + public DefinitionInfo( + TemplateSchema schema, + String name, + HashSet availableContext) + { + m_schema = schema; + m_availableContext = availableContext; + + // Lookup the definition + Definition = m_schema.GetDefinition(name); + + // Determine whether to expand + if (Definition.Context.Length > 0) + { + m_allowedContext = Definition.Context; + Expand = m_availableContext.IsSupersetOf(m_allowedContext); + } + else + { + m_allowedContext = new String[0]; + Expand = false; + } + } + + public DefinitionInfo( + DefinitionInfo parent, + String name) + { + m_schema = parent.m_schema; + m_availableContext = parent.m_availableContext; + + // Lookup the definition + Definition = m_schema.GetDefinition(name); + + // Determine whether to expand + if (Definition.Context.Length > 0) + { + m_allowedContext = new HashSet(parent.m_allowedContext.Concat(Definition.Context)).ToArray(); + Expand = m_availableContext.IsSupersetOf(m_allowedContext); + } + else + { + m_allowedContext = parent.m_allowedContext; + Expand = parent.Expand; + } + } + + public IEnumerable Get() + where T : Definition + { + return m_schema.Get(Definition); + } + + private HashSet m_availableContext; + private String[] m_allowedContext; + private TemplateSchema m_schema; + public Definition Definition; + public Boolean Expand; + } + + private readonly TemplateContext m_context; + private readonly TemplateSchema m_schema; + private readonly TemplateUnraveler m_unraveler; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateException.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateException.cs new file mode 100644 index 00000000000..cc9d57c691f --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateException.cs @@ -0,0 +1,90 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class TemplateException : VssServiceException + { + public TemplateException(String message) + : base(message) + { + } + + public TemplateException( + String message, + Exception innerException) + : base(message, innerException) + { + } + + /// + /// Initializes an exception from serialized data + /// + /// object holding the serialized data + /// context info about the source or destination + protected TemplateException( + SerializationInfo info, + StreamingContext context) + : 
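// A minimal sketch of the expansion test DefinitionInfo performs above: an
// expression is expanded only when every context name the definition requires
// is currently registered in ExpressionValues. The context names here are
// hypothetical examples.
using System;
using System.Collections.Generic;

var available = new HashSet<string>(StringComparer.OrdinalIgnoreCase) { "github", "strategy" };

string[] needsGitHub = { "github" };
string[] needsMatrix = { "github", "matrix" };

Console.WriteLine(available.IsSupersetOf(needsGitHub)); // True  -> expand now
Console.WriteLine(available.IsSupersetOf(needsMatrix)); // False -> leave the expression for a later pass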
base(info, context) + { + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class TemplateValidationException : TemplateException + { + public TemplateValidationException() + : this(TemplateStrings.TemplateNotValid()) + { + } + + public TemplateValidationException(IEnumerable errors) + : this(TemplateStrings.TemplateNotValidWithErrors(string.Join(",", (errors ?? Enumerable.Empty()).Select(e => e.Message)))) + { + m_errors = new List(errors ?? Enumerable.Empty()); + } + + public TemplateValidationException(String message) + : base(message) + { + } + + public TemplateValidationException( + String message, + Exception innerException) + : base(message, innerException) + { + } + + public IList Errors + { + get + { + if (m_errors == null) + { + m_errors = new List(); + } + return m_errors; + } + } + + /// + /// Initializes an exception from serialized data + /// + /// object holding the serialized data + /// context info about the source or destination + protected TemplateValidationException( + SerializationInfo info, + StreamingContext context) + : base(info, context) + { + } + + private List m_errors; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateMemory.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateMemory.cs new file mode 100644 index 00000000000..139f5422977 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateMemory.cs @@ -0,0 +1,302 @@ +using System; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + /// + /// Tracks characteristics about the current memory usage (CPU, stack, size) + /// + internal sealed class TemplateMemory + { + internal TemplateMemory( + Int32 maxDepth, + Int32 maxEvents, + Int32 maxBytes) + { + m_maxDepth = maxDepth; + m_maxEvents = maxEvents; + m_maxBytes = maxBytes; + } + + public Int32 CurrentBytes => m_currentBytes; + + public Int32 MaxBytes => m_maxBytes; + + internal void AddBytes(Int32 bytes) + { + checked + { + m_currentBytes += bytes; + } + + if (m_currentBytes > m_maxBytes) + { + throw new InvalidOperationException(TemplateStrings.MaxObjectSizeExceeded()); + } + } + + internal void AddBytes(String value) + { + var bytes = CalculateBytes(value); + AddBytes(bytes); + } + + internal void AddBytes( + JToken value, + Boolean traverse) + { + var bytes = CalculateBytes(value, traverse); + AddBytes(bytes); + } + + internal void AddBytes( + TemplateToken value, + Boolean traverse = false) + { + var bytes = CalculateBytes(value, traverse); + AddBytes(bytes); + } + + internal void AddBytes(LiteralToken literal) + { + var bytes = CalculateBytes(literal); + AddBytes(bytes); + } + + internal void AddBytes(SequenceToken sequence) + { + var bytes = CalculateBytes(sequence); + AddBytes(bytes); + } + + internal void AddBytes(MappingToken mapping) + { + var bytes = CalculateBytes(mapping); + AddBytes(bytes); + } + + internal void AddBytes(BasicExpressionToken basicExpression) + { + var bytes = CalculateBytes(basicExpression); + AddBytes(bytes); + } + + internal void AddBytes(InsertExpressionToken insertExpression) + { + var bytes = CalculateBytes(insertExpression); + AddBytes(bytes); + } + + internal Int32 CalculateBytes(String value) + { + // This measurement doesn't have to be perfect + // https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/ + + checked + { + return StringBaseOverhead + ((value?.Length ?? 
0) * sizeof(Char)); + } + } + + internal Int32 CalculateBytes( + JToken value, + Boolean traverse) + { + // This measurement doesn't have to be perfect + // https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/ + + if (value is null) + { + return MinObjectSize; + } + + if (!traverse) + { + switch (value.Type) + { + case JTokenType.String: + checked + { + return StringBaseOverhead + (value.ToObject().Length * sizeof(Char)); + } + + case JTokenType.Property: + var property = value as JProperty; + checked + { + return StringBaseOverhead + ((property.Name?.Length ?? 0) * sizeof(Char)); + } + + case JTokenType.Array: + case JTokenType.Boolean: + case JTokenType.Float: + case JTokenType.Integer: + case JTokenType.Null: + case JTokenType.Object: + return MinObjectSize; + + default: + throw new NotSupportedException($"Unexpected JToken type '{value.Type}' when traversing object"); + } + } + + var result = 0; + do + { + // Descend as much as possible + while (true) + { + // Add bytes + var bytes = CalculateBytes(value, false); + checked + { + result += bytes; + } + + // Descend + if (value.HasValues) + { + value = value.First; + } + // No more descendants + else + { + break; + } + } + + // Next sibling or ancestor sibling + do + { + var sibling = value.Next; + + // Sibling found + if (sibling != null) + { + value = sibling; + break; + } + + // Ascend + value = value.Parent; + + } while (value != null); + + } while (value != null); + + return result; + } + + internal Int32 CalculateBytes( + TemplateToken value, + Boolean traverse = false) + { + var enumerable = traverse ? value.Traverse() : new[] { value }; + var result = 0; + foreach (var item in enumerable) + { + // This measurement doesn't have to be perfect + // https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/ + switch (item.Type) + { + case TokenType.Null: + case TokenType.Boolean: + case TokenType.Number: + checked + { + result += MinObjectSize; + } + break; + + case TokenType.String: + var stringToken = item as StringToken; + checked + { + result += MinObjectSize + StringBaseOverhead + ((stringToken.Value?.Length ?? 0) * sizeof(Char)); + } + break; + + case TokenType.Sequence: + case TokenType.Mapping: + case TokenType.InsertExpression: + // Min object size is good enough. Allows for base + a few fields. + checked + { + result += MinObjectSize; + } + break; + + case TokenType.BasicExpression: + var basicExpression = item as BasicExpressionToken; + checked + { + result += MinObjectSize + StringBaseOverhead + ((basicExpression.Expression?.Length ?? 
0) * sizeof(Char)); + } + break; + + default: + throw new NotSupportedException($"Unexpected template type '{item.Type}'"); + } + } + + return result; + } + + internal void SubtractBytes(Int32 bytes) + { + if (bytes > m_currentBytes) + { + throw new InvalidOperationException("Bytes to subtract exceeds total bytes"); + } + + m_currentBytes -= bytes; + } + + internal void SubtractBytes( + JToken value, + Boolean traverse) + { + var bytes = CalculateBytes(value, traverse); + SubtractBytes(bytes); + } + + internal void SubtractBytes( + TemplateToken value, + Boolean traverse = false) + { + var bytes = CalculateBytes(value, traverse); + SubtractBytes(bytes); + } + + internal void IncrementDepth() + { + if (m_depth++ >= m_maxDepth) + { + throw new InvalidOperationException(TemplateStrings.MaxObjectDepthExceeded()); + } + } + + internal void DecrementDepth() + { + m_depth--; + } + + internal void IncrementEvents() + { + if (m_events++ >= m_maxEvents) + { + throw new InvalidOperationException(TemplateStrings.MaxTemplateEventsExceeded()); + } + } + + internal const Int32 MinObjectSize = 24; + internal const Int32 StringBaseOverhead = 26; + private readonly Int32 m_maxDepth; + private readonly Int32 m_maxEvents; + private readonly Int32 m_maxBytes; + private Int32 m_depth; + private Int32 m_events; + private Int32 m_currentBytes; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateReader.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateReader.cs new file mode 100644 index 00000000000..56b149c3f08 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateReader.cs @@ -0,0 +1,818 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Text; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.ObjectTemplating.Schema; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + /// + /// Converts a source object format into a TemplateToken + /// + internal sealed class TemplateReader + { + private TemplateReader( + TemplateContext context, + TemplateSchema schema, + IObjectReader objectReader, + Int32? fileId) + { + m_context = context; + m_schema = schema; + m_memory = context.Memory; + m_objectReader = objectReader; + m_fileId = fileId; + } + + internal static TemplateToken Read( + TemplateContext context, + String type, + IObjectReader objectReader, + Int32? fileId, + out Int32 bytes) + { + return Read(context, type, objectReader, fileId, context.Schema, out bytes); + } + + internal static TemplateToken Read( + TemplateContext context, + String type, + IObjectReader objectReader, + Int32? 
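// A minimal sketch of the size accounting TemplateMemory performs above, using
// the same deliberately rough approximations: MinObjectSize = 24 per token and
// StringBaseOverhead = 26 plus two bytes per character for string payloads.
const int MinObjectSize = 24;
const int StringBaseOverhead = 26;

int ApproximateStringBytes(string value)
    => StringBaseOverhead + ((value?.Length ?? 0) * sizeof(char));

// A StringToken holding "hello world":
//   MinObjectSize + ApproximateStringBytes("hello world") = 24 + 26 + 22 = 72 bytes
// AddBytes throws once the running total exceeds MaxBytes, which is how an
// evaluated template that grows too large gets rejected.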
fileId, + TemplateSchema schema, + out Int32 bytes) + { + TemplateToken result = null; + + var reader = new TemplateReader(context, schema, objectReader, fileId); + var originalBytes = context.Memory.CurrentBytes; + try + { + objectReader.ValidateStart(); + var definition = new DefinitionInfo(schema, type); + result = reader.ReadValue(definition); + objectReader.ValidateEnd(); + } + catch (Exception ex) + { + context.Error(fileId, null, null, ex); + } + finally + { + bytes = context.Memory.CurrentBytes - originalBytes; + } + + return result; + } + + private TemplateToken ReadValue(DefinitionInfo definition) + { + m_memory.IncrementEvents(); + + // Scalar + if (m_objectReader.AllowLiteral(out LiteralToken literal)) + { + var scalar = ParseScalar(literal, definition.AllowedContext); + Validate(ref scalar, definition); + m_memory.AddBytes(scalar); + return scalar; + } + + // Sequence + if (m_objectReader.AllowSequenceStart(out SequenceToken sequence)) + { + m_memory.IncrementDepth(); + m_memory.AddBytes(sequence); + + var sequenceDefinition = definition.Get().FirstOrDefault(); + + // Legal + if (sequenceDefinition != null) + { + var itemDefinition = new DefinitionInfo(definition, sequenceDefinition.ItemType); + + // Add each item + while (!m_objectReader.AllowSequenceEnd()) + { + var item = ReadValue(itemDefinition); + sequence.Add(item); + } + } + // Illegal + else + { + // Error + m_context.Error(sequence, TemplateStrings.UnexpectedSequenceStart()); + + // Skip each item + while (!m_objectReader.AllowSequenceEnd()) + { + SkipValue(); + } + } + + m_memory.DecrementDepth(); + return sequence; + } + + // Mapping + if (m_objectReader.AllowMappingStart(out MappingToken mapping)) + { + m_memory.IncrementDepth(); + m_memory.AddBytes(mapping); + + var mappingDefinitions = definition.Get().ToList(); + + // Legal + if (mappingDefinitions.Count > 0) + { + if (mappingDefinitions.Count > 1 || + m_schema.HasProperties(mappingDefinitions[0]) || + String.IsNullOrEmpty(mappingDefinitions[0].LooseKeyType)) + { + HandleMappingWithWellKnownProperties(definition, mappingDefinitions, mapping); + } + else + { + var keyDefinition = new DefinitionInfo(definition, mappingDefinitions[0].LooseKeyType); + var valueDefinition = new DefinitionInfo(definition, mappingDefinitions[0].LooseValueType); + HandleMappingWithAllLooseProperties(definition, keyDefinition, valueDefinition, mapping); + } + } + // Illegal + else + { + m_context.Error(mapping, TemplateStrings.UnexpectedMappingStart()); + + while (!m_objectReader.AllowMappingEnd()) + { + SkipValue(); + SkipValue(); + } + } + + m_memory.DecrementDepth(); + return mapping; + } + + throw new InvalidOperationException(TemplateStrings.ExpectedScalarSequenceOrMapping()); + } + + private void HandleMappingWithWellKnownProperties( + DefinitionInfo definition, + List mappingDefinitions, + MappingToken mapping) + { + // Check if loose properties are allowed + String looseKeyType = null; + String looseValueType = null; + DefinitionInfo? looseKeyDefinition = null; + DefinitionInfo? 
looseValueDefinition = null; + if (!String.IsNullOrEmpty(mappingDefinitions[0].LooseKeyType)) + { + looseKeyType = mappingDefinitions[0].LooseKeyType; + looseValueType = mappingDefinitions[0].LooseValueType; + } + + var keys = new HashSet(StringComparer.OrdinalIgnoreCase); + + while (m_objectReader.AllowLiteral(out LiteralToken rawLiteral)) + { + var nextKeyScalar = ParseScalar(rawLiteral, definition.AllowedContext); + + // Expression + if (nextKeyScalar is ExpressionToken) + { + // Legal + if (definition.AllowedContext.Length > 0) + { + m_memory.AddBytes(nextKeyScalar); + var anyDefinition = new DefinitionInfo(definition, TemplateConstants.Any); + mapping.Add(nextKeyScalar, ReadValue(anyDefinition)); + } + // Illegal + else + { + m_context.Error(nextKeyScalar, TemplateStrings.ExpressionNotAllowed()); + SkipValue(); + } + + continue; + } + + // Not a string, convert + if (!(nextKeyScalar is StringToken nextKey)) + { + nextKey = new StringToken(nextKeyScalar.FileId, nextKeyScalar.Line, nextKeyScalar.Column, nextKeyScalar.ToString()); + } + + // Duplicate + if (!keys.Add(nextKey.Value)) + { + m_context.Error(nextKey, TemplateStrings.ValueAlreadyDefined(nextKey.Value)); + SkipValue(); + continue; + } + + // Well known + if (m_schema.TryMatchKey(mappingDefinitions, nextKey.Value, out String nextValueType)) + { + m_memory.AddBytes(nextKey); + var nextValueDefinition = new DefinitionInfo(definition, nextValueType); + var nextValue = ReadValue(nextValueDefinition); + mapping.Add(nextKey, nextValue); + continue; + } + + // Loose + if (looseKeyType != null) + { + if (looseKeyDefinition == null) + { + looseKeyDefinition = new DefinitionInfo(definition, looseKeyType); + looseValueDefinition = new DefinitionInfo(definition, looseValueType); + } + + Validate(nextKey, looseKeyDefinition.Value); + m_memory.AddBytes(nextKey); + var nextValue = ReadValue(looseValueDefinition.Value); + mapping.Add(nextKey, nextValue); + continue; + } + + // Error + m_context.Error(nextKey, TemplateStrings.UnexpectedValue(nextKey.Value)); + SkipValue(); + } + + // Only one + if (mappingDefinitions.Count > 1) + { + var hitCount = new Dictionary(); + foreach (MappingDefinition mapdef in mappingDefinitions) + { + foreach (String key in mapdef.Properties.Keys) + { + if (!hitCount.TryGetValue(key, out Int32 value)) + { + hitCount.Add(key, 1); + } + else + { + hitCount[key] = value + 1; + } + } + } + + List nonDuplicates = new List(); + foreach (String key in hitCount.Keys) + { + if(hitCount[key] == 1) + { + nonDuplicates.Add(key); + } + } + nonDuplicates.Sort(); + + String listToDeDuplicate = String.Join(", ", nonDuplicates); + m_context.Error(mapping, TemplateStrings.UnableToDetermineOneOf(listToDeDuplicate)); + } + + ExpectMappingEnd(); + } + + private void HandleMappingWithAllLooseProperties( + DefinitionInfo mappingDefinition, + DefinitionInfo keyDefinition, + DefinitionInfo valueDefinition, + MappingToken mapping) + { + TemplateToken nextValue; + var keys = new HashSet(StringComparer.OrdinalIgnoreCase); + + while (m_objectReader.AllowLiteral(out LiteralToken rawLiteral)) + { + var nextKeyScalar = ParseScalar(rawLiteral, mappingDefinition.AllowedContext); + + // Expression + if (nextKeyScalar is ExpressionToken) + { + // Legal + if (mappingDefinition.AllowedContext.Length > 0) + { + m_memory.AddBytes(nextKeyScalar); + nextValue = ReadValue(valueDefinition); + mapping.Add(nextKeyScalar, nextValue); + } + // Illegal + else + { + m_context.Error(nextKeyScalar, TemplateStrings.ExpressionNotAllowed()); + SkipValue(); + } + + 
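// Illustrative note on the loop above: an expression used as a mapping key is
// accepted only when the surrounding definition grants some expression
// context; otherwise ExpressionNotAllowed is reported and the value is
// skipped. A hypothetical fragment:
//
//   env:
//     ${{ matrix.config }}: enabled   # read only if the definition for "env"
//                                     # lists a context such as "matrix";
//                                     # rejected otherwise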
continue; + } + + // Not a string, convert + if (!(nextKeyScalar is StringToken nextKey)) + { + nextKey = new StringToken(nextKeyScalar.FileId, nextKeyScalar.Line, nextKeyScalar.Column, nextKeyScalar.ToString()); + } + + // Duplicate + if (!keys.Add(nextKey.Value)) + { + m_context.Error(nextKey, TemplateStrings.ValueAlreadyDefined(nextKey.Value)); + SkipValue(); + continue; + } + + // Validate + Validate(nextKey, keyDefinition); + m_memory.AddBytes(nextKey); + + // Add the pair + nextValue = ReadValue(valueDefinition); + mapping.Add(nextKey, nextValue); + } + + ExpectMappingEnd(); + } + + private void ExpectMappingEnd() + { + if (!m_objectReader.AllowMappingEnd()) + { + throw new Exception("Expected mapping end"); // Should never happen + } + } + + private void SkipValue(Boolean error = false) + { + m_memory.IncrementEvents(); + + // Scalar + if (m_objectReader.AllowLiteral(out LiteralToken literal)) + { + if (error) + { + m_context.Error(literal, TemplateStrings.UnexpectedValue(literal)); + } + + return; + } + + // Sequence + if (m_objectReader.AllowSequenceStart(out SequenceToken sequence)) + { + m_memory.IncrementDepth(); + + if (error) + { + m_context.Error(sequence, TemplateStrings.UnexpectedSequenceStart()); + } + + while (!m_objectReader.AllowSequenceEnd()) + { + SkipValue(); + } + + m_memory.DecrementDepth(); + return; + } + + // Mapping + if (m_objectReader.AllowMappingStart(out MappingToken mapping)) + { + m_memory.IncrementDepth(); + + if (error) + { + m_context.Error(mapping, TemplateStrings.UnexpectedMappingStart()); + } + + while (!m_objectReader.AllowMappingEnd()) + { + SkipValue(); + SkipValue(); + } + + m_memory.DecrementDepth(); + return; + } + + // Unexpected + throw new InvalidOperationException(TemplateStrings.ExpectedScalarSequenceOrMapping()); + } + + private void Validate( + StringToken stringToken, + DefinitionInfo definition) + { + var scalar = stringToken as ScalarToken; + Validate(ref scalar, definition); + } + + private void Validate( + ref ScalarToken scalar, + DefinitionInfo definition) + { + switch (scalar.Type) + { + case TokenType.Null: + case TokenType.Boolean: + case TokenType.Number: + case TokenType.String: + var literal = scalar as LiteralToken; + + // Legal + if (definition.Get().Any(x => x.IsMatch(literal))) + { + return; + } + + // Not a string, convert + if (literal.Type != TokenType.String) + { + literal = new StringToken(literal.FileId, literal.Line, literal.Column, literal.ToString()); + + // Legal + if (definition.Get().Any(x => x.IsMatch(literal))) + { + scalar = literal; + return; + } + } + + // Illegal + m_context.Error(literal, TemplateStrings.UnexpectedValue(literal)); + break; + + case TokenType.BasicExpression: + + // Illegal + if (definition.AllowedContext.Length == 0) + { + m_context.Error(scalar, TemplateStrings.ExpressionNotAllowed()); + } + + break; + + default: + m_context.Error(scalar, TemplateStrings.UnexpectedValue(scalar)); + break; + } + } + + private ScalarToken ParseScalar( + LiteralToken token, + String[] allowedContext) + { + // Not a string + if (token.Type != TokenType.String) + { + return token; + } + + // Check if the value is definitely a literal + var raw = token.ToString(); + Int32 startExpression; + if (String.IsNullOrEmpty(raw) || + (startExpression = raw.IndexOf(TemplateConstants.OpenExpression)) < 0) // Doesn't contain ${{ + { + return token; + } + + // Break the value into segments of LiteralToken and ExpressionToken + var segments = new List(); + var i = 0; + while (i < raw.Length) + { + // An expression 
starts here: + if (i == startExpression) + { + // Find the end of the expression - i.e. }} + startExpression = i; + var endExpression = -1; + var inString = false; + for (i += TemplateConstants.OpenExpression.Length; i < raw.Length; i++) + { + if (raw[i] == '\'') + { + inString = !inString; // Note, this handles escaped single quotes gracefully. Ex. 'foo''bar' + } + else if (!inString && raw[i] == '}' && raw[i - 1] == '}') + { + endExpression = i; + i++; + break; + } + } + + // Check if not closed + if (endExpression < startExpression) + { + m_context.Error(token, TemplateStrings.ExpressionNotClosed()); + return token; + } + + // Parse the expression + var rawExpression = raw.Substring( + startExpression + TemplateConstants.OpenExpression.Length, + endExpression - startExpression + 1 - TemplateConstants.OpenExpression.Length - TemplateConstants.CloseExpression.Length); + var expression = ParseExpression(token.Line, token.Column, rawExpression, allowedContext, out Exception ex); + + // Check for error + if (ex != null) + { + m_context.Error(token, ex); + return token; + } + + // Check if a directive was used when not allowed + if (!String.IsNullOrEmpty(expression.Directive) && + ((startExpression != 0) || (i < raw.Length))) + { + m_context.Error(token, TemplateStrings.DirectiveNotAllowedInline(expression.Directive)); + return token; + } + + // Add the segment + segments.Add(expression); + + // Look for the next expression + startExpression = raw.IndexOf(TemplateConstants.OpenExpression, i); + } + // The next expression is further ahead: + else if (i < startExpression) + { + // Append the segment + AddString(segments, token.Line, token.Column, raw.Substring(i, startExpression - i)); + + // Adjust the position + i = startExpression; + } + // No remaining expressions: + else + { + AddString(segments, token.Line, token.Column, raw.Substring(i)); + break; + } + } + + // Check if can convert to a literal + // For example, the escaped expression: ${{ '{{ this is a literal }}' }} + if (segments.Count == 1 && + segments[0] is BasicExpressionToken basicExpression && + IsExpressionString(basicExpression.Expression, out String str)) + { + return new StringToken(m_fileId, token.Line, token.Column, str); + } + + // Check if only ony segment + if (segments.Count == 1) + { + return segments[0]; + } + + // Build the new expression, using the format function + var format = new StringBuilder(); + var args = new StringBuilder(); + var argIndex = 0; + foreach (var segment in segments) + { + if (segment is StringToken literal) + { + var text = ExpressionUtility.StringEscape(literal.Value) // Escape quotes + .Replace("{", "{{") // Escape braces + .Replace("}", "}}"); + format.Append(text); + } + else + { + format.Append("{" + argIndex.ToString(CultureInfo.InvariantCulture) + "}"); // Append formatter + argIndex++; + + var expression = segment as BasicExpressionToken; + args.Append(", "); + args.Append(expression.Expression); + } + } + + return new BasicExpressionToken(m_fileId, token.Line, token.Column, $"format('{format}'{args})"); + } + + private ExpressionToken ParseExpression( + Int32? line, + Int32? 
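// Illustrative note on the net effect of ParseScalar above. The context name
// "github.actor" is a hypothetical example.
//
//   raw:     Hello ${{ github.actor }}!
//   result:  format('Hello {0}!', github.actor)
//
// Literal segments have single quotes escaped and braces doubled before going
// into the format string; each expression segment becomes a {n} placeholder
// with its expression appended as an argument. A value that is exactly one
// expression is returned as that expression, and a single quoted expression
// such as ${{ 'literal text' }} collapses back to a plain StringToken.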
column, + String value, + String[] allowedContext, + out Exception ex) + { + var trimmed = value.Trim(); + + // Check if the value is empty + if (String.IsNullOrEmpty(trimmed)) + { + ex = new ArgumentException(TemplateStrings.ExpectedExpression()); + return null; + } + + // Try to find a matching directive + List parameters; + if (MatchesDirective(trimmed, TemplateConstants.InsertDirective, 0, out parameters, out ex)) + { + return new InsertExpressionToken(m_fileId, line, column); + } + else if (ex != null) + { + return null; + } + + // Check if the value is an expression + if (!ExpressionToken.IsValidExpression(trimmed, allowedContext, out ex)) + { + return null; + } + + // Return the expression + return new BasicExpressionToken(m_fileId, line, column, trimmed); + } + + private void AddString( + List segments, + Int32? line, + Int32? column, + String value) + { + // If the last segment was a LiteralToken, then append to the last segment + if (segments.Count > 0 && segments[segments.Count - 1] is StringToken lastSegment) + { + segments[segments.Count - 1] = new StringToken(m_fileId, line, column, lastSegment.Value + value); + } + // Otherwise add a new LiteralToken + else + { + segments.Add(new StringToken(m_fileId, line, column, value)); + } + } + + private static Boolean MatchesDirective( + String trimmed, + String directive, + Int32 expectedParameters, + out List parameters, + out Exception ex) + { + if (trimmed.StartsWith(directive, StringComparison.Ordinal) && + (trimmed.Length == directive.Length || Char.IsWhiteSpace(trimmed[directive.Length]))) + { + parameters = new List(); + var startIndex = directive.Length; + var inString = false; + var parens = 0; + for (var i = startIndex; i < trimmed.Length; i++) + { + var c = trimmed[i]; + if (Char.IsWhiteSpace(c) && !inString && parens == 0) + { + if (startIndex < i) + { + parameters.Add(trimmed.Substring(startIndex, i - startIndex)); + } + + startIndex = i + 1; + } + else if (c == '\'') + { + inString = !inString; + } + else if (c == '(' && !inString) + { + parens++; + } + else if (c == ')' && !inString) + { + parens--; + } + } + + if (startIndex < trimmed.Length) + { + parameters.Add(trimmed.Substring(startIndex)); + } + + if (expectedParameters != parameters.Count) + { + ex = new ArgumentException(TemplateStrings.ExpectedNParametersFollowingDirective(expectedParameters, directive, parameters.Count)); + parameters = null; + return false; + } + + ex = null; + return true; + } + + ex = null; + parameters = null; + return false; + } + + private static Boolean IsExpressionString( + String trimmed, + out String str) + { + var builder = new StringBuilder(); + + var inString = false; + for (var i = 0; i < trimmed.Length; i++) + { + var c = trimmed[i]; + if (c == '\'') + { + inString = !inString; + + if (inString && i != 0) + { + builder.Append(c); + } + } + else if (!inString) + { + str = default; + return false; + } + else + { + builder.Append(c); + } + } + + str = builder.ToString(); + return true; + } + + private struct DefinitionInfo + { + public DefinitionInfo( + TemplateSchema schema, + String name) + { + m_schema = schema; + + // Lookup the definition + Definition = m_schema.GetDefinition(name); + + // Determine whether to expand + if (Definition.Context.Length > 0) + { + AllowedContext = Definition.Context; + } + else + { + AllowedContext = new String[0]; + } + } + + public DefinitionInfo( + DefinitionInfo parent, + String name) + { + m_schema = parent.m_schema; + + // Lookup the definition + Definition = m_schema.GetDefinition(name); + 
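// Illustrative note on MatchesDirective above for the only directive defined
// here, "insert": it must be the entire expression and takes no parameters.
//
//   ${{ insert }}            -> InsertExpressionToken
//   ${{ insert extra }}      -> error: unexpected parameter count for the directive
//   prefix ${{ insert }}     -> error: directive not allowed inline
//
// Anything that is not a directive is passed to ExpressionToken.IsValidExpression
// and, when valid, becomes a BasicExpressionToken.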
+ // Determine whether to expand + if (Definition.Context.Length > 0) + { + AllowedContext = new HashSet(parent.AllowedContext.Concat(Definition.Context)).ToArray(); + } + else + { + AllowedContext = parent.AllowedContext; + } + } + + public IEnumerable Get() + where T : Definition + { + return m_schema.Get(Definition); + } + + private TemplateSchema m_schema; + public Definition Definition; + public String[] AllowedContext; + } + + private readonly TemplateContext m_context; + private readonly Int32? m_fileId; + private readonly TemplateMemory m_memory; + private readonly IObjectReader m_objectReader; + private readonly TemplateSchema m_schema; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateUnraveler.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateUnraveler.cs new file mode 100644 index 00000000000..5ccabfc783b --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateUnraveler.cs @@ -0,0 +1,1210 @@ +using System; +using System.Collections.Generic; +using System.Text; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + /// + /// This class allows callers to easily traverse a template object. + /// This class hides the details of expression expansion, depth tracking, + /// and memory tracking. + /// + internal sealed class TemplateUnraveler + { + internal TemplateUnraveler( + TemplateContext context, + TemplateToken template, + Int32 removeBytes) + { + m_context = context; + m_memory = context.Memory; + + // Initialize the reader state + MoveFirst(template, removeBytes); + } + + internal Boolean AllowScalar( + Boolean expand, + out ScalarToken scalar) + { + m_memory.IncrementEvents(); + + if (expand) + { + Unravel(expand: true); + } + + if (m_current?.Value is ScalarToken scalarToken) + { + scalar = scalarToken; + + // Add bytes before they are emitted to the caller (so the caller doesn't have to track bytes) + m_memory.AddBytes(scalar); + + MoveNext(); + return true; + } + + scalar = null; + return false; + } + + internal Boolean AllowSequenceStart( + Boolean expand, + out SequenceToken sequence) + { + m_memory.IncrementEvents(); + + if (expand) + { + Unravel(expand: true); + } + + if (m_current is SequenceState sequenceState && sequenceState.IsStart) + { + sequence = new SequenceToken(sequenceState.Value.FileId, sequenceState.Value.Line, sequenceState.Value.Column); + + // Add bytes before they are emitted to the caller (so the caller doesn't have to track bytes) + m_memory.AddBytes(sequence); + + MoveNext(); + return true; + } + + sequence = null; + return false; + } + + internal Boolean AllowSequenceEnd(Boolean expand) + { + m_memory.IncrementEvents(); + + if (expand) + { + Unravel(expand: true); + } + + if (m_current is SequenceState sequenceState && sequenceState.IsEnd) + { + MoveNext(); + return true; + } + + return false; + } + + internal Boolean AllowMappingStart( + Boolean expand, + out MappingToken mapping) + { + m_memory.IncrementEvents(); + + if (expand) + { + Unravel(expand: true); + } + + if (m_current is MappingState mappingState && mappingState.IsStart) + { + mapping = new MappingToken(mappingState.Value.FileId, mappingState.Value.Line, mappingState.Value.Column); + + // Add bytes before they are emitted to the caller (so the caller doesn't have to track bytes) + m_memory.AddBytes(mapping); + + MoveNext(); + return true; + } + + mapping = null; + return false; + } + + internal Boolean AllowMappingEnd(Boolean expand) + { + m_memory.IncrementEvents(); + + if (expand) 
+ { + Unravel(expand: true); + } + + if (m_current is MappingState mappingState && mappingState.IsEnd) + { + MoveNext(); + return true; + } + + return false; + } + + internal void ReadEnd() + { + m_memory.IncrementEvents(); + + if (m_current != null) + { + throw new InvalidOperationException("Expected end of template object. " + DumpState()); + } + } + + internal void ReadMappingEnd() + { + if (!AllowMappingEnd(expand: false)) + { + throw new InvalidOperationException("Unexpected state while attempting to read the mapping end. " + DumpState()); + } + } + + internal void SkipSequenceItem() + { + m_memory.IncrementEvents(); + + if (!(m_current?.Parent is SequenceState ancestor)) + { + throw new InvalidOperationException("Unexpected state while attempting to skip the current sequence item. " + DumpState()); + } + + MoveNext(skipNestedEvents: true); + } + + internal void SkipMappingKey() + { + m_memory.IncrementEvents(); + + if (!(m_current?.Parent is MappingState ancestor) || !ancestor.IsKey) + { + throw new InvalidOperationException("Unexpected state while attempting to skip the current mapping key. " + DumpState()); + } + + MoveNext(skipNestedEvents: true); + } + + internal void SkipMappingValue() + { + m_memory.IncrementEvents(); + + if (!(m_current?.Parent is MappingState ancestor) || ancestor.IsKey) + { + throw new InvalidOperationException("Unexpected state while attempting to skip the current mapping value. " + DumpState()); + } + + MoveNext(skipNestedEvents: true); + } + + private String DumpState() + { + var result = new StringBuilder(); + + if (m_current == null) + { + result.AppendLine("State: (null)"); + } + else + { + result.AppendLine("State:"); + result.AppendLine(); + + // Push state hierarchy + var stack = new Stack(); + var curr = m_current; + while (curr != null) + { + result.AppendLine(curr.ToString()); + curr = curr.Parent; + } + } + + return result.ToString(); + } + + private void MoveFirst( + TemplateToken value, + Int32 removeBytes) + { + if (!(value is LiteralToken) && !(value is SequenceToken) && !(value is MappingToken) && !(value is BasicExpressionToken)) + { + throw new NotSupportedException($"Unexpected type '{value?.GetType().Name}' when initializing object reader state"); + } + + m_memory.IncrementEvents(); + m_current = ReaderState.CreateState(null, value, m_context, removeBytes); + } + + private void MoveNext(Boolean skipNestedEvents = false) + { + m_memory.IncrementEvents(); + + if (m_current == null) + { + return; + } + + // Sequence start + if (m_current is SequenceState sequenceState && + sequenceState.IsStart && + !skipNestedEvents) + { + // Move to the first item or sequence end + m_current = sequenceState.Next(); + } + // Mapping start + else if (m_current is MappingState mappingState && + mappingState.IsStart && + !skipNestedEvents) + { + // Move to the first item key or mapping end + m_current = mappingState.Next(); + } + // Parent is a sequence + else if (m_current.Parent is SequenceState parentSequenceState) + { + // Move to the next item or sequence end + m_current.Remove(); + m_current = parentSequenceState.Next(); + } + // Parent is a mapping + else if (m_current.Parent is MappingState parentMappingState) + { + // Move to the next item value, item key, or mapping end + m_current.Remove(); + m_current = parentMappingState.Next(); + } + // Parent is an expression end + else if (m_current.Parent != null) + { + m_current.Remove(); + m_current = m_current.Parent; + } + // Parent is null + else + { + m_current.Remove(); + m_current = null; + } + + 
m_expanded = false; + Unravel(expand: false); + } + + private void Unravel(Boolean expand) + { + if (m_expanded) + { + return; + } + + do + { + if (m_current == null) + { + break; + } + // Literal + else if (m_current is LiteralState literalState) + { + break; + } + else if (m_current is BasicExpressionState basicExpressionState) + { + // Sequence item is a basic expression start + // For example: + // steps: + // - script: credScan + // - ${{ parameters.preBuild }} + // - script: build + if (basicExpressionState.IsStart && + m_current.Parent is SequenceState) + { + if (expand) + { + SequenceItemBasicExpression(); + } + else + { + break; + } + } + // Mapping key is a basic expression start + // For example: + // steps: + // - ${{ parameters.scriptHost }}: echo hi + else if (basicExpressionState.IsStart && + m_current.Parent is MappingState parentMappingState && + parentMappingState.IsKey) + { + if (expand) + { + MappingKeyBasicExpression(); + } + else + { + break; + } + } + // Mapping value is a basic expression start + // For example: + // steps: + // - script: credScan + // - script: ${{ parameters.tool }} + else if (basicExpressionState.IsStart && + m_current.Parent is MappingState parentMappingState2 && + !parentMappingState2.IsKey) + { + if (expand) + { + MappingValueBasicExpression(); + } + else + { + break; + } + } + else if (basicExpressionState.IsStart && + m_current.Parent is null) + { + if (expand) + { + RootBasicExpression(); + } + else + { + break; + } + } + // Basic expression end + else if (basicExpressionState.IsEnd) + { + EndExpression(); + } + else + { + UnexpectedState(); + } + } + else if (m_current is MappingState mappingState) + { + // Mapping end, closing an "insert" mapping insertion + if (mappingState.IsEnd && + m_current.Parent is InsertExpressionState) + { + m_current.Remove(); + m_current = m_current.Parent; // Skip to the expression end + } + // Normal mapping start + else if (mappingState.IsStart) + { + break; + } + // Normal mapping end + else if (mappingState.IsEnd) + { + break; + } + else + { + UnexpectedState(); + } + } + else if (m_current is SequenceState sequenceState) + { + // Sequence end, closing a sequence insertion + if (sequenceState.IsEnd && + m_current.Parent is BasicExpressionState && + m_current.Parent.Parent is SequenceState) + { + m_current.Remove(); + m_current = m_current.Parent; // Skip to the expression end + } + // Normal sequence start + else if (sequenceState.IsStart) + { + break; + } + // Normal sequence end + else if (sequenceState.IsEnd) + { + break; + } + else + { + UnexpectedState(); + } + } + else if (m_current is InsertExpressionState insertExpressionState) + { + // Mapping key, beginning an "insert" mapping insertion + // For example: + // - job: a + // variables: + // ${{ insert }}: ${{ parameters.jobVariables }} + if (insertExpressionState.IsStart && + m_current.Parent is MappingState parentMappingState && + parentMappingState.IsKey) + { + if (expand) + { + StartMappingInsertion(); + } + else + { + break; + } + } + // Expression end + else if (insertExpressionState.IsEnd) + { + EndExpression(); + } + // Not allowed + else if (insertExpressionState.IsStart) + { + m_context.Error(insertExpressionState.Value, TemplateStrings.DirectiveNotAllowed(insertExpressionState.Value.Directive)); + m_current.Remove(); + m_current = insertExpressionState.ToStringToken(); + } + else + { + UnexpectedState(); + } + } + else + { + UnexpectedState(); + } + + m_memory.IncrementEvents(); + } while (true); + + m_expanded = expand; + } + + private 
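// Illustrative note on the sequence-item expansion handled above when the
// expression evaluates to a sequence. The parameter name is hypothetical;
// assume parameters.preSteps evaluates to two script steps.
//
//   steps:                              steps:
//   - ${{ parameters.preSteps }}   ==>  - script: restore
//   - script: build                     - script: lint
//                                       - script: build
//
// The nested sequence is spliced item by item into the outer sequence
// (isSequenceInsertion) instead of appearing as a nested list; a scalar or
// mapping result is simply substituted in place.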
void SequenceItemBasicExpression() + { + // The template looks like: + // + // steps: + // - ${{ parameters.preSteps }} + // - script: build + // + // The current state looks like: + // + // MappingState // The document starts with a mapping + // + // SequenceState // The "steps" sequence + // + // BasicExpressionState // m_current + + var expressionState = m_current as BasicExpressionState; + var expression = expressionState.Value; + TemplateToken value; + var removeBytes = 0; + try + { + value = expression.EvaluateTemplateToken(expressionState.Context, out removeBytes); + } + catch (Exception ex) + { + m_context.Error(expression, ex); + value = null; + } + + // Move to the nested sequence, skip the sequence start + if (value is SequenceToken nestedSequence) + { + m_current = expressionState.Next(nestedSequence, isSequenceInsertion: true, removeBytes: removeBytes); + } + // Move to the new value + else if (value != null) + { + m_current = expressionState.Next(value, removeBytes); + } + // Move to the expression end + else if (value == null) + { + expressionState.End(); + } + } + + private void MappingKeyBasicExpression() + { + // The template looks like: + // + // steps: + // - ${{ parameters.scriptHost }}: echo hi + // + // The current state looks like: + // + // MappingState // The document starts with a mapping + // + // SequenceState // The "steps" sequence + // + // MappingState // The step mapping + // + // BasicExpressionState // m_current + + // The expression should evaluate to a string + var expressionState = m_current as BasicExpressionState; + var expression = expressionState.Value as BasicExpressionToken; + StringToken stringToken; + var removeBytes = 0; + try + { + stringToken = expression.EvaluateStringToken(expressionState.Context, out removeBytes); + } + catch (Exception ex) + { + m_context.Error(expression, ex); + stringToken = null; + } + + // Move to the stringToken + if (stringToken != null) + { + m_current = expressionState.Next(stringToken, removeBytes); + } + // Move to the next key or mapping end + else + { + m_current.Remove(); + var parentMappingState = m_current.Parent as MappingState; + parentMappingState.Next().Remove(); // Skip the value + m_current = parentMappingState.Next(); // Next key or mapping end + } + } + + private void MappingValueBasicExpression() + { + // The template looks like: + // + // steps: + // - script: credScan + // - script: ${{ parameters.tool }} + // + // The current state looks like: + // + // MappingState // The document starts with a mapping + // + // SequenceState // The "steps" sequence + // + // MappingState // The step mapping + // + // BasicExpressionState // m_current + + var expressionState = m_current as BasicExpressionState; + var expression = expressionState.Value; + TemplateToken value; + var removeBytes = 0; + try + { + value = expression.EvaluateTemplateToken(expressionState.Context, out removeBytes); + } + catch (Exception ex) + { + m_context.Error(expression, ex); + value = new StringToken(expression.FileId, expression.Line, expression.Column, String.Empty); + } + + // Move to the new value + m_current = expressionState.Next(value, removeBytes); + } + + private void RootBasicExpression() + { + // The template looks like: + // + // ${{ parameters.tool }} + // + // The current state looks like: + // + // BasicExpressionState // m_current + + var expressionState = m_current as BasicExpressionState; + var expression = expressionState.Value; + TemplateToken value; + var removeBytes = 0; + try + { + value = 
expression.EvaluateTemplateToken(expressionState.Context, out removeBytes); + } + catch (Exception ex) + { + m_context.Error(expression, ex); + value = new StringToken(expression.FileId, expression.Line, expression.Column, String.Empty); + } + + // Move to the new value + m_current = expressionState.Next(value, removeBytes); + } + + private void StartMappingInsertion() + { + // The template looks like: + // + // jobs: + // - job: a + // variables: + // ${{ insert }}: ${{ parameters.jobVariables }} + // + // The current state looks like: + // + // MappingState // The document starts with a mapping + // + // SequenceState // The "jobs" sequence + // + // MappingState // The "job" mapping + // + // MappingState // The "variables" mapping + // + // InsertExpressionState // m_current + + var expressionState = m_current as InsertExpressionState; + var parentMappingState = expressionState.Parent as MappingState; + var nestedValue = parentMappingState.Value[parentMappingState.Index].Value; + var nestedMapping = nestedValue as MappingToken; + var removeBytes = 0; + if (nestedMapping != null) + { + // Intentionally empty + } + else if (nestedValue is BasicExpressionToken basicExpression) + { + // The expression should evaluate to a mapping + try + { + nestedMapping = basicExpression.EvaluateMappingToken(expressionState.Context, out removeBytes); + } + catch (Exception ex) + { + m_context.Error(basicExpression, ex); + nestedMapping = null; + } + } + else + { + m_context.Error(nestedValue, TemplateStrings.ExpectedMapping()); + nestedMapping = null; + } + + // Move to the nested first key + if (nestedMapping?.Count > 0) + { + m_current = expressionState.Next(nestedMapping, removeBytes); + } + // Move to the expression end + else + { + if (removeBytes > 0) + { + m_memory.SubtractBytes(removeBytes); + } + + expressionState.End(); + } + } + + private void EndExpression() + { + // End of document + if (m_current.Parent == null) + { + m_current.Remove(); + m_current = null; + } + // End basic expression + else if (m_current is BasicExpressionState) + { + // Move to the next item or sequence end + if (m_current.Parent is SequenceState parentSequenceState) + { + m_current.Remove(); + m_current = parentSequenceState.Next(); + } + // Move to the next key, next value, or mapping end + else + { + m_current.Remove(); + var parentMappingState = m_current.Parent as MappingState; + m_current = parentMappingState.Next(); + } + } + // End "insert" mapping insertion + else + { + // Move to the next key or mapping end + m_current.Remove(); + var parentMappingState = m_current.Parent as MappingState; + parentMappingState.Next().Remove(); // Skip the value + m_current = parentMappingState.Next(); + } + } + + private void UnexpectedState() + { + throw new InvalidOperationException("Expected state while unraveling expressions. 
" + DumpState()); + } + + private abstract class ReaderState + { + public ReaderState( + ReaderState parent, + TemplateToken value, + TemplateContext context) + { + Parent = parent; + Value = value; + Context = context; + } + + public static ReaderState CreateState( + ReaderState parent, + TemplateToken value, + TemplateContext context, + Int32 removeBytes = 0) + { + switch (value.Type) + { + case TokenType.Null: + case TokenType.Boolean: + case TokenType.Number: + case TokenType.String: + return new LiteralState(parent, value as LiteralToken, context, removeBytes); + + case TokenType.Sequence: + return new SequenceState(parent, value as SequenceToken, context, removeBytes); + + case TokenType.Mapping: + return new MappingState(parent, value as MappingToken, context, removeBytes); + + case TokenType.BasicExpression: + return new BasicExpressionState(parent, value as BasicExpressionToken, context, removeBytes); + + case TokenType.InsertExpression: + if (removeBytes > 0) + { + throw new InvalidOperationException($"Unexpected {nameof(removeBytes)}"); + } + + return new InsertExpressionState(parent, value as InsertExpressionToken, context); + + default: + throw new NotSupportedException($"Unexpected {nameof(ReaderState)} type: {value?.GetType().Name}"); + } + } + + public ReaderState Parent { get; } + public TemplateContext Context { get; protected set; } + public TemplateToken Value { get; } + + public abstract void Remove(); + } + + private abstract class ReaderState : ReaderState + where T : class + { + public ReaderState( + ReaderState parent, + TemplateToken value, + TemplateContext context) + : base(parent, value, context) + { + } + + public new T Value + { + get + { + if (!Object.ReferenceEquals(base.Value, m_value)) + { + m_value = base.Value as T; + } + + return m_value; + } + } + + private T m_value; + } + + private sealed class LiteralState : ReaderState + { + public LiteralState( + ReaderState parent, + LiteralToken literal, + TemplateContext context, + Int32 removeBytes) + : base(parent, literal, context) + { + context.Memory.AddBytes(literal); + context.Memory.IncrementDepth(); + m_removeBytes = removeBytes; + } + + public override void Remove() + { + Context.Memory.SubtractBytes(Value); + Context.Memory.DecrementDepth(); + + // Subtract the memory overhead of the template token. + // We are now done traversing it and pointers to it no longer need to exist. 
+ if (m_removeBytes > 0) + { + Context.Memory.SubtractBytes(m_removeBytes); + } + } + + public override String ToString() + { + var result = new StringBuilder(); + result.AppendLine($"{GetType().Name}"); + return result.ToString(); + } + + private Int32 m_removeBytes; + } + + private sealed class SequenceState : ReaderState + { + public SequenceState( + ReaderState parent, + SequenceToken sequence, + TemplateContext context, + Int32 removeBytes) + : base(parent, sequence, context) + { + context.Memory.AddBytes(sequence); + context.Memory.IncrementDepth(); + m_removeBytes = removeBytes; + } + + /// + /// Indicates whether the state represents the sequence-start event + /// + public Boolean IsStart { get; private set; } = true; + + /// + /// The current index within the sequence + /// + public Int32 Index { get; private set; } + + /// + /// Indicates whether the state represents the sequence-end event + /// + public Boolean IsEnd => !IsStart && Index >= Value.Count; + + public ReaderState Next() + { + // Adjust the state + if (IsStart) + { + IsStart = false; + } + else + { + Index++; + } + + // Return the next event + if (!IsEnd) + { + return CreateState(this, Value[Index], Context); + } + else + { + return this; + } + } + + public ReaderState End() + { + IsStart = false; + Index = Value.Count; + return this; + } + + public override void Remove() + { + Context.Memory.SubtractBytes(Value); + Context.Memory.DecrementDepth(); + + // Subtract the memory overhead of the template token. + // We are now done traversing it and pointers to it no longer need to exist. + if (m_removeBytes > 0) + { + Context.Memory.SubtractBytes(m_removeBytes); + } + } + + public override String ToString() + { + var result = new StringBuilder(); + result.AppendLine($"{GetType().Name}:"); + result.AppendLine($" IsStart: {IsStart}"); + result.AppendLine($" Index: {Index}"); + result.AppendLine($" IsEnd: {IsEnd}"); + return result.ToString(); + } + + private Int32 m_removeBytes; + } + + private sealed class MappingState : ReaderState + { + public MappingState( + ReaderState parent, + MappingToken mapping, + TemplateContext context, + Int32 removeBytes) + : base(parent, mapping, context) + { + context.Memory.AddBytes(mapping); + context.Memory.IncrementDepth(); + m_removeBytes = removeBytes; + } + + /// + /// Indicates whether the state represents the mapping-start event + /// + public Boolean IsStart { get; private set; } = true; + + /// + /// The current index within the mapping + /// + public Int32 Index { get; private set; } + + /// + /// Indicates whether the state represents a mapping-key position + /// + public Boolean IsKey { get; private set; } + + /// + /// Indicates whether the state represents the mapping-end event + /// + public Boolean IsEnd => !IsStart && Index >= Value.Count; + + public ReaderState Next() + { + // Adjust the state + if (IsStart) + { + IsStart = false; + IsKey = true; + } + else if (IsKey) + { + IsKey = false; + } + else + { + Index++; + IsKey = true; + } + + // Return the next event + if (!IsEnd) + { + if (IsKey) + { + return CreateState(this, Value[Index].Key, Context); + } + else + { + return CreateState(this, Value[Index].Value, Context); + } + } + else + { + return this; + } + } + + public ReaderState End() + { + IsStart = false; + Index = Value.Count; + return this; + } + + public override void Remove() + { + Context.Memory.SubtractBytes(Value); + Context.Memory.DecrementDepth(); + + // Subtract the memory overhead of the template token. 
+ // We are now done traversing it and pointers to it no longer need to exist. + if (m_removeBytes > 0) + { + Context.Memory.SubtractBytes(m_removeBytes); + } + } + + public override String ToString() + { + var result = new StringBuilder(); + result.AppendLine($"{GetType().Name}:"); + result.AppendLine($" IsStart: {IsStart}"); + result.AppendLine($" Index: {Index}"); + result.AppendLine($" IsKey: {IsKey}"); + result.AppendLine($" IsEnd: {IsEnd}"); + return result.ToString(); + } + + private Int32 m_removeBytes; + } + + private sealed class BasicExpressionState : ReaderState + { + public BasicExpressionState( + ReaderState parent, + BasicExpressionToken expression, + TemplateContext context, + Int32 removeBytes) + : base(parent, expression, context) + { + context.Memory.AddBytes(expression); + context.Memory.IncrementDepth(); + m_removeBytes = removeBytes; + } + + /// + /// Indicates whether entering the expression + /// + public Boolean IsStart { get; private set; } = true; + + /// + /// Indicates whether leaving the expression + /// + public Boolean IsEnd => !IsStart; + + public ReaderState Next( + TemplateToken value, + Int32 removeBytes = 0) + { + // Adjust the state + IsStart = false; + + // Return the nested state + return CreateState(this, value, Context, removeBytes); + } + + public ReaderState Next( + SequenceToken value, + Boolean isSequenceInsertion = false, + Int32 removeBytes = 0) + { + // Adjust the state + IsStart = false; + + // Create the nested state + var nestedState = CreateState(this, value, Context, removeBytes); + if (isSequenceInsertion) + { + var nestedSequenceState = nestedState as SequenceState; + return nestedSequenceState.Next(); // Skip the sequence start + } + else + { + return nestedState; + } + } + + public ReaderState End() + { + IsStart = false; + return this; + } + + public override void Remove() + { + Context.Memory.SubtractBytes(Value); + Context.Memory.DecrementDepth(); + + // Subtract the memory overhead of the template token. + // We are now done traversing it and pointers to it no longer need to exist. 
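// Illustrative note (editorial; not part of the original patch). When a sequence
// item is an expression that evaluates to a sequence, for example:
//
//   steps:
//     - ${{ parameters.preSteps }}   # evaluates to a SequenceToken
//     - script: build
//
// SequenceItemBasicExpression calls Next(nestedSequence, isSequenceInsertion: true, ...),
// which skips the nested sequence-start event so the evaluated items are spliced
// directly into the outer "steps" sequence rather than appearing as a nested list.
// The matching sequence-end is then skipped by the "closing a sequence insertion"
// branch in Unravel.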
+ if (m_removeBytes > 0) + { + Context.Memory.SubtractBytes(m_removeBytes); + } + } + + public override String ToString() + { + var result = new StringBuilder(); + result.AppendLine($"{GetType().Name}:"); + result.AppendLine($" IsStart: {IsStart}"); + return result.ToString(); + } + + private Int32 m_removeBytes; + } + + private sealed class InsertExpressionState : ReaderState + { + public InsertExpressionState( + ReaderState parent, + InsertExpressionToken expression, + TemplateContext context) + : base(parent, expression, context) + { + Context.Memory.AddBytes(expression); + Context.Memory.IncrementDepth(); + } + + /// + /// Indicates whether entering or leaving the expression + /// + public Boolean IsStart { get; private set; } = true; + + /// + /// Indicates whether leaving the expression + /// + public Boolean IsEnd => !IsStart; + + public ReaderState Next( + MappingToken value, + Int32 removeBytes = 0) + { + // Adjust the state + IsStart = false; + + // Create the nested state + var nestedState = CreateState(this, value, Context, removeBytes) as MappingState; + return nestedState.Next(); // Skip the mapping start + } + + public ReaderState End() + { + IsStart = false; + return this; + } + + /// + /// This happens when the expression is not allowed + /// + public ReaderState ToStringToken() + { + var literal = new StringToken(Value.FileId, Value.Line, Value.Column, $"{TemplateConstants.OpenExpression} {Value.Directive} {TemplateConstants.CloseExpression}"); + return CreateState(Parent, literal, Context); + } + + public override void Remove() + { + Context.Memory.SubtractBytes(Value); + Context.Memory.DecrementDepth(); + } + + public override String ToString() + { + var result = new StringBuilder(); + result.AppendLine($"{GetType().Name}:"); + result.AppendLine($" IsStart: {IsStart}"); + return result.ToString(); + } + } + + private readonly TemplateContext m_context; + private readonly TemplateMemory m_memory; + private ReaderState m_current; + private Boolean m_expanded; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateValidationError.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateValidationError.cs new file mode 100644 index 00000000000..e5c590d9c14 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateValidationError.cs @@ -0,0 +1,62 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + /// + /// Provides information about an error which occurred during validation. 
+ /// + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public class TemplateValidationError + { + public TemplateValidationError() + { + } + + public TemplateValidationError(String message) + : this(null, message) + { + } + + public TemplateValidationError( + String code, + String message) + { + Code = code; + Message = message; + } + + [DataMember(EmitDefaultValue = false)] + public String Code + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Message + { + get; + set; + } + + public static IEnumerable Create(Exception exception) + { + for (int i = 0; i < 50; i++) + { + yield return new TemplateValidationError(exception.Message); + if (exception.InnerException == null) + { + break; + } + + exception = exception.InnerException; + } + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateValidationErrors.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateValidationErrors.cs new file mode 100644 index 00000000000..4b1e738d0e3 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateValidationErrors.cs @@ -0,0 +1,110 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + /// + /// Provides information about an error which occurred during validation. + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class TemplateValidationErrors : IEnumerable + { + public TemplateValidationErrors() + { + } + + public TemplateValidationErrors( + Int32 maxErrors, + Int32 maxMessageLength) + { + m_maxErrors = maxErrors; + m_maxMessageLength = maxMessageLength; + } + + public Int32 Count => m_errors.Count; + + public void Add(String message) + { + Add(new TemplateValidationError(message)); + } + + public void Add(Exception ex) + { + Add(null, ex); + } + + public void Add(String messagePrefix, Exception ex) + { + for (int i = 0; i < 50; i++) + { + String message = !String.IsNullOrEmpty(messagePrefix) ? $"{messagePrefix} {ex.Message}" : ex.Message; + Add(new TemplateValidationError(message)); + if (ex.InnerException == null) + { + break; + } + + ex = ex.InnerException; + } + } + + public void Add(IEnumerable errors) + { + foreach (var error in errors) + { + Add(error); + } + } + + public void Add(TemplateValidationError error) + { + // Check max errors + if (m_maxErrors <= 0 || + m_errors.Count < m_maxErrors) + { + // Check max message length + if (m_maxMessageLength > 0 && + error.Message?.Length > m_maxMessageLength) + { + error = new TemplateValidationError(error.Code, error.Message.Substring(0, m_maxMessageLength) + "[...]"); + } + + m_errors.Add(error); + } + } + + /// + /// Throws if any errors. 
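// Illustrative usage sketch (editorial; not part of the original patch). The
// limits and messages below are arbitrary example values. Callers accumulate
// problems into TemplateValidationErrors and call Check() once at the end,
// which throws a TemplateValidationException if anything was recorded:
//
//   var errors = new TemplateValidationErrors(maxErrors: 10, maxMessageLength: 500);
//   errors.Add("steps must not be empty");
//   errors.Add("while reading 'steps':", new FormatException("expected a sequence"));
//   errors.Check();   // throws TemplateValidationException when Count > 0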
+ /// + public void Check() + { + if (m_errors.Count > 0) + { + throw new TemplateValidationException(m_errors); + } + } + + public void Clear() + { + m_errors.Clear(); + } + + public IEnumerator GetEnumerator() + { + return (m_errors as IEnumerable).GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return (m_errors as IEnumerable).GetEnumerator(); + } + + private readonly List m_errors = new List(); + private readonly Int32 m_maxErrors; + private readonly Int32 m_maxMessageLength; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateWriter.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateWriter.cs new file mode 100644 index 00000000000..cdfbdf34065 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/TemplateWriter.cs @@ -0,0 +1,72 @@ +using System; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + /// + /// Converts from a TemplateToken into another object format + /// + internal sealed class TemplateWriter + { + internal static void Write( + IObjectWriter objectWriter, + TemplateToken value) + { + objectWriter.WriteStart(); + WriteValue(objectWriter, value); + objectWriter.WriteEnd(); + } + + private static void WriteValue( + IObjectWriter objectWriter, + TemplateToken value) + { + switch (value?.Type ?? TokenType.Null) + { + case TokenType.Null: + objectWriter.WriteNull(); + break; + + case TokenType.Boolean: + var booleanToken = value as BooleanToken; + objectWriter.WriteBoolean(booleanToken.Value); + break; + + case TokenType.Number: + var numberToken = value as NumberToken; + objectWriter.WriteNumber(numberToken.Value); + break; + + case TokenType.String: + case TokenType.BasicExpression: + case TokenType.InsertExpression: + objectWriter.WriteString(value.ToString()); + break; + + case TokenType.Mapping: + var mappingToken = value as MappingToken; + objectWriter.WriteMappingStart(); + foreach (var pair in mappingToken) + { + WriteValue(objectWriter, pair.Key); + WriteValue(objectWriter, pair.Value); + } + objectWriter.WriteMappingEnd(); + break; + + case TokenType.Sequence: + var sequenceToken = value as SequenceToken; + objectWriter.WriteSequenceStart(); + foreach (var item in sequenceToken) + { + WriteValue(objectWriter, item); + } + objectWriter.WriteSequenceEnd(); + break; + + default: + throw new NotSupportedException($"Unexpected type '{value.GetType()}'"); + } + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/BasicExpressionToken.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/BasicExpressionToken.cs new file mode 100644 index 00000000000..db77592528f --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/BasicExpressionToken.cs @@ -0,0 +1,146 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.DistributedTask.Expressions2.Sdk.Functions; +using GitHub.DistributedTask.Pipelines.ObjectTemplating; +using GitHub.Services.WebApi.Internal; +using Container = GitHub.DistributedTask.Expressions2.Sdk.Container; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class BasicExpressionToken : ExpressionToken + { + internal BasicExpressionToken( + Int32? fileId, + Int32? line, + Int32? 
column, + String expression) + : base(TokenType.BasicExpression, fileId, line, column, null) + { + m_expression = expression; + } + + internal String Expression + { + get + { + if (m_expression == null) + { + m_expression = String.Empty; + } + + return m_expression; + } + } + + public override TemplateToken Clone(Boolean omitSource) + { + return omitSource ? new BasicExpressionToken(null, null, null, m_expression) : new BasicExpressionToken(FileId, Line, Column, m_expression); + } + + public override String ToString() + { + return $"{TemplateConstants.OpenExpression} {m_expression} {TemplateConstants.CloseExpression}"; + } + + public override String ToDisplayString() + { + var expressionParser = new ExpressionParser(); + var expressionNode = expressionParser.ValidateSyntax(Expression, null); + if (expressionNode is Format formatNode) + { + // Make sure our first item is indeed a literal string so we can format it. + if (formatNode.Parameters.Count > 1 && + formatNode.Parameters.First() is Literal literalValueNode && + literalValueNode.Kind == ValueKind.String) + { + // Get all other Parameters san the formatted string to pass into the formatter + var formatParameters = formatNode.Parameters.Skip(1).Select(x => this.ConvertFormatParameterToExpression(x)).ToArray(); + if (formatParameters.Length > 0) + { + String formattedString = String.Empty; + try + { + formattedString = String.Format(CultureInfo.InvariantCulture, (formatNode.Parameters[0] as Literal).Value as String, formatParameters); + } + catch (FormatException) { } + catch (ArgumentNullException) { } // If this operation fails, revert to default display name + if (!String.IsNullOrEmpty(formattedString)) + { + return TrimDisplayString(formattedString); + } + } + } + } + return base.ToDisplayString(); + } + + internal StringToken EvaluateStringToken( + TemplateContext context, + out Int32 bytes) + { + return EvaluateStringToken(context, Expression, out bytes); + } + + internal MappingToken EvaluateMappingToken( + TemplateContext context, + out Int32 bytes) + { + return EvaluateMappingToken(context, Expression, out bytes); + } + + internal SequenceToken EvaluateSequenceToken( + TemplateContext context, + out Int32 bytes) + { + return EvaluateSequenceToken(context, Expression, out bytes); + } + + internal TemplateToken EvaluateTemplateToken( + TemplateContext context, + out Int32 bytes) + { + return EvaluateTemplateToken(context, Expression, out bytes); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_expression?.Length == 0) + { + m_expression = null; + } + } + + private String ConvertFormatParameterToExpression(ExpressionNode node) + { + var nodeString = node.ConvertToExpression(); + + // If the node is a container, see if it starts with '(' and ends with ')' so we can simplify the string + // Should only simplify if only one '(' or ')' exists in the string + // We are trying to simplify the case (a || b) to a || b + // But we should avoid simplifying ( a && b + if (node is Container && + nodeString.Length > 2 && + nodeString[0] == ExpressionConstants.StartParameter && + nodeString[nodeString.Length - 1] == ExpressionConstants.EndParameter && + nodeString.Count(character => character == ExpressionConstants.StartParameter) == 1 && + nodeString.Count(character => character == ExpressionConstants.EndParameter) == 1) + { + nodeString = nodeString = nodeString.Substring(1, nodeString.Length - 2); + } + return String.Concat(TemplateConstants.OpenExpression, " ", nodeString, " ", 
TemplateConstants.CloseExpression); + } + + [DataMember(Name = "expr", EmitDefaultValue = false)] + private String m_expression; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/BooleanToken.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/BooleanToken.cs new file mode 100644 index 00000000000..8b138bf2369 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/BooleanToken.cs @@ -0,0 +1,44 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class BooleanToken : LiteralToken, IBoolean + { + public BooleanToken( + Int32? fileId, + Int32? line, + Int32? column, + Boolean value) + : base(TokenType.Boolean, fileId, line, column) + { + m_value = value; + } + + public Boolean Value => m_value; + + public override TemplateToken Clone(Boolean omitSource) + { + return omitSource ? new BooleanToken(null, null, null, m_value) : new BooleanToken(FileId, Line, Column, m_value); + } + + public override String ToString() + { + return m_value ? "true" : "false"; + } + + Boolean IBoolean.GetBoolean() + { + return Value; + } + + [DataMember(Name = "bool", EmitDefaultValue = false)] + private Boolean m_value; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/ExpressionToken.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/ExpressionToken.cs new file mode 100644 index 00000000000..0709e236cd7 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/ExpressionToken.cs @@ -0,0 +1,64 @@ +using System; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + /// + /// Base class for all template expression tokens + /// + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class ExpressionToken : ScalarToken + { + internal ExpressionToken( + Int32 templateType, + Int32? fileId, + Int32? line, + Int32? 
column, + String directive) + : base(templateType, fileId, line, column) + { + Directive = directive; + } + + [DataMember(Name = "directive", EmitDefaultValue = false)] + internal String Directive { get; } + + internal static Boolean IsValidExpression( + String expression, + String[] allowedContext, + out Exception ex) + { + // Create dummy allowed contexts + INamedValueInfo[] namedValues = null; + if (allowedContext?.Length > 0) + { + namedValues = allowedContext.Select(x => new NamedValueInfo(x)).ToArray(); + } + + // Parse + Boolean result; + ExpressionNode root = null; + try + { + root = new ExpressionParser().CreateTree(expression, null, namedValues, null) as ExpressionNode; + + result = true; + ex = null; + } + catch (Exception exception) + { + result = false; + ex = exception; + } + + return result; + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/InsertExpressionToken.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/InsertExpressionToken.cs new file mode 100644 index 00000000000..f8be83cbeaa --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/InsertExpressionToken.cs @@ -0,0 +1,31 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class InsertExpressionToken : ExpressionToken + { + internal InsertExpressionToken( + Int32? fileId, + Int32? line, + Int32? column) + : base(TokenType.InsertExpression, fileId, line, column, TemplateConstants.InsertDirective) + { + } + + public override TemplateToken Clone(Boolean omitSource) + { + return omitSource ? new InsertExpressionToken(null, null, null) : new InsertExpressionToken(FileId, Line, Column); + } + + public override String ToString() + { + return $"{TemplateConstants.OpenExpression} insert {TemplateConstants.CloseExpression}"; + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/LiteralToken.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/LiteralToken.cs new file mode 100644 index 00000000000..17f6926e84e --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/LiteralToken.cs @@ -0,0 +1,22 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class LiteralToken : ScalarToken + { + public LiteralToken( + Int32 tokenType, + Int32? fileId, + Int32? line, + Int32? 
column) + : base(tokenType, fileId, line, column) + { + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/MappingToken.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/MappingToken.cs new file mode 100644 index 00000000000..ce8b08e3c38 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/MappingToken.cs @@ -0,0 +1,245 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Collections.Specialized; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + [DataContract] + [JsonObject] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class MappingToken : TemplateToken, IEnumerable>, IReadOnlyObject + { + public MappingToken( + Int32? fileId, + Int32? line, + Int32? column) + : base(TokenType.Mapping, fileId, line, column) + { + } + + internal Int32 Count => m_items?.Count ?? 0; + + // IReadOnlyObject (for expressions) + Int32 IReadOnlyObject.Count + { + get + { + InitializeDictionary(); + return m_dictionary.Count; + } + } + + // IReadOnlyObject (for expressions) + IEnumerable IReadOnlyObject.Keys + { + get + { + InitializeDictionary(); + foreach (var key in m_dictionary.Keys) + { + yield return key as String; + } + } + } + + // IReadOnlyObject (for expressions) + IEnumerable IReadOnlyObject.Values + { + get + { + InitializeDictionary(); + foreach (var value in m_dictionary.Values) + { + yield return value; + } + } + } + + public KeyValuePair this[Int32 index] + { + get + { + return m_items[index]; + } + + set + { + m_items[index] = value; + m_dictionary = null; + } + } + + // IReadOnlyObject (for expressions) + Object IReadOnlyObject.this[String key] + { + get + { + InitializeDictionary(); + return m_dictionary[key]; + } + } + + public void Add(IEnumerable> items) + { + foreach (var item in items) + { + Add(item); + } + } + + public void Add(KeyValuePair item) + { + if (m_items == null) + { + m_items = new List>(); + } + + m_items.Add(item); + m_dictionary = null; + } + + public void Add( + ScalarToken key, + TemplateToken value) + { + Add(new KeyValuePair(key, value)); + } + + public override TemplateToken Clone(Boolean omitSource) + { + var result = omitSource ? 
new MappingToken(null, null, null) : new MappingToken(FileId, Line, Column); + if (m_items?.Count > 0) + { + foreach (var pair in m_items) + { + result.Add(pair.Key?.Clone() as ScalarToken, pair.Value?.Clone()); + } + } + return result; + } + + public IEnumerator> GetEnumerator() + { + if (m_items?.Count > 0) + { + return m_items.GetEnumerator(); + } + else + { + return (new List>(0)).GetEnumerator(); + } + } + + System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() + { + if (m_items?.Count > 0) + { + return m_items.GetEnumerator(); + } + else + { + return (new KeyValuePair[0]).GetEnumerator(); + } + } + + public void Insert( + Int32 index, + KeyValuePair item) + { + if (m_items == null) + { + m_items = new List>(); + } + + m_items.Insert(index, item); + m_dictionary = null; + } + + public void Insert( + Int32 index, + ScalarToken key, + TemplateToken value) + { + Insert(index, new KeyValuePair(key, value)); + } + + public void RemoveAt(Int32 index) + { + m_items.RemoveAt(index); + m_dictionary = null; + } + + // IReadOnlyObject (for expressions) + Boolean IReadOnlyObject.ContainsKey(String key) + { + InitializeDictionary(); + return m_dictionary.Contains(key); + } + + // IReadOnlyObject (for expressions) + IEnumerator IReadOnlyObject.GetEnumerator() + { + InitializeDictionary(); + return m_dictionary.GetEnumerator(); + } + + // IReadOnlyObject (for expressions) + Boolean IReadOnlyObject.TryGetValue( + String key, + out Object value) + { + InitializeDictionary(); + if (!m_dictionary.Contains(key)) + { + value = null; + return false; + } + + value = m_dictionary[key]; + return true; + } + + /// + /// Initializes the dictionary used for the expressions IReadOnlyObject interface + /// + private void InitializeDictionary() + { + if (m_dictionary == null) + { + m_dictionary = new OrderedDictionary(StringComparer.OrdinalIgnoreCase); + if (m_items?.Count > 0) + { + foreach (var pair in m_items) + { + if (pair.Key is StringToken stringToken && + !m_dictionary.Contains(stringToken.Value)) + { + m_dictionary.Add(stringToken.Value, pair.Value); + } + } + } + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_items?.Count == 0) + { + m_items = null; + } + } + + [DataMember(Name = "map", EmitDefaultValue = false)] + private List> m_items; + + private IDictionary m_dictionary; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/NullToken.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/NullToken.cs new file mode 100644 index 00000000000..cff91add1fd --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/NullToken.cs @@ -0,0 +1,32 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class NullToken : LiteralToken, INull + { + public NullToken( + Int32? fileId, + Int32? line, + Int32? column) + : base(TokenType.Null, fileId, line, column) + { + } + + public override TemplateToken Clone(Boolean omitSource) + { + return omitSource ? 
new NullToken(null, null, null) : new NullToken(FileId, Line, Column); + } + + public override String ToString() + { + return String.Empty; + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/NumberToken.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/NumberToken.cs new file mode 100644 index 00000000000..31d270304ad --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/NumberToken.cs @@ -0,0 +1,45 @@ +using System; +using System.ComponentModel; +using System.Globalization; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class NumberToken : LiteralToken, INumber + { + public NumberToken( + Int32? fileId, + Int32? line, + Int32? column, + Double value) + : base(TokenType.Number, fileId, line, column) + { + m_value = value; + } + + public Double Value => m_value; + + public override TemplateToken Clone(Boolean omitSource) + { + return omitSource ? new NumberToken(null, null, null, m_value) : new NumberToken(FileId, Line, Column, m_value); + } + + public override String ToString() + { + return m_value.ToString("G15", CultureInfo.InvariantCulture); + } + + Double INumber.GetNumber() + { + return Value; + } + + [DataMember(Name = "num", EmitDefaultValue = false)] + private Double m_value; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/ScalarToken.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/ScalarToken.cs new file mode 100644 index 00000000000..81dea8e221e --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/ScalarToken.cs @@ -0,0 +1,36 @@ +using System; +using System.ComponentModel; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class ScalarToken : TemplateToken + { + protected ScalarToken( + Int32 type, + Int32? fileId, + Int32? line, + Int32? column) + : base(type, fileId, line, column) + { + } + + public virtual String ToDisplayString() + { + return TrimDisplayString(ToString()); + } + + protected String TrimDisplayString(String displayString) + { + var firstLine = displayString.TrimStart(' ', '\t', '\r', '\n'); + var firstNewLine = firstLine.IndexOfAny(new[] { '\r', '\n' }); + if (firstNewLine >= 0) + { + firstLine = firstLine.Substring(0, firstNewLine); + } + return firstLine; + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/SequenceToken.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/SequenceToken.cs new file mode 100644 index 00000000000..d5bbc6ecc03 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/SequenceToken.cs @@ -0,0 +1,151 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + [DataContract] + [JsonObject] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class SequenceToken : TemplateToken, IEnumerable, IReadOnlyArray + { + public SequenceToken( + Int32? fileId, + Int32? line, + Int32? 
column) + : base(TokenType.Sequence, fileId, line, column) + { + } + + public Int32 Count => m_items?.Count ?? 0; + + public TemplateToken this[Int32 index] + { + get + { + return m_items[index]; + } + + set + { + m_items[index] = value; + } + } + + // IReadOnlyArray (for expressions) + Object IReadOnlyArray.this[Int32 index] + { + get + { + return m_items[index]; + } + } + + public void Add(TemplateToken value) + { + if (m_items == null) + { + m_items = new List(); + } + + m_items.Add(value); + } + + public override TemplateToken Clone(Boolean omitSource) + { + var result = omitSource ? new SequenceToken(null, null, null) : new SequenceToken(FileId, Line, Column); + if (m_items?.Count > 0) + { + foreach (var item in m_items) + { + result.Add(item?.Clone()); + } + } + return result; + } + + public IEnumerator GetEnumerator() + { + if (m_items?.Count > 0) + { + return m_items.GetEnumerator(); + } + else + { + return (new TemplateToken[0] as IEnumerable).GetEnumerator(); + } + } + + IEnumerator IEnumerable.GetEnumerator() + { + if (m_items?.Count > 0) + { + return m_items.GetEnumerator(); + } + else + { + return (new TemplateToken[0] as IEnumerable).GetEnumerator(); + } + } + + // IReadOnlyArray (for expressions) + IEnumerator IReadOnlyArray.GetEnumerator() + { + if (m_items?.Count > 0) + { + return m_items.GetEnumerator(); + } + else + { + return (new TemplateToken[0] as IEnumerable).GetEnumerator(); + } + } + + public void Insert( + Int32 index, + TemplateToken item) + { + if (m_items == null) + { + m_items = new List(); + } + + m_items.Insert(index, item); + } + + public void InsertRange( + Int32 index, + IEnumerable items) + { + if (m_items == null) + { + m_items = new List(); + } + + m_items.InsertRange(index, items); + } + + public void RemoveAt(Int32 index) + { + m_items.RemoveAt(index); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_items?.Count == 0) + { + m_items = null; + } + } + + [DataMember(Name = "seq", EmitDefaultValue = false)] + private List m_items; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/StringToken.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/StringToken.cs new file mode 100644 index 00000000000..308a51a385b --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/StringToken.cs @@ -0,0 +1,64 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class StringToken : LiteralToken, IString + { + public StringToken( + Int32? fileId, + Int32? line, + Int32? column, + String value) + : base(TokenType.String, fileId, line, column) + { + m_value = value; + } + + public String Value + { + get + { + if (m_value == null) + { + m_value = String.Empty; + } + + return m_value; + } + } + + public override TemplateToken Clone(Boolean omitSource) + { + return omitSource ? new StringToken(null, null, null, m_value) : new StringToken(FileId, Line, Column, m_value); + } + + public override String ToString() + { + return m_value ?? 
String.Empty; + } + + String IString.GetString() + { + return Value; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_value?.Length == 0) + { + m_value = null; + } + } + + [DataMember(Name = "lit", EmitDefaultValue = false)] + private String m_value; + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TemplateToken.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TemplateToken.cs new file mode 100644 index 00000000000..afe46356fd9 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TemplateToken.cs @@ -0,0 +1,290 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + /// + /// Base class for all template tokens + /// + [DataContract] + [JsonConverter(typeof(TemplateTokenJsonConverter))] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class TemplateToken + { + protected TemplateToken( + Int32 type, + Int32? fileId, + Int32? line, + Int32? column) + { + Type = type; + FileId = fileId; + Line = line; + Column = column; + } + + [IgnoreDataMember] + internal Int32? FileId { get; set; } + + [DataMember(Name = "line", EmitDefaultValue = false)] + internal Int32? Line { get; } + + [DataMember(Name = "col", EmitDefaultValue = false)] + internal Int32? Column { get; } + + [DataMember(Name = "type", EmitDefaultValue = false)] + internal Int32 Type { get; } + + public TemplateToken Clone() + { + return Clone(false); + } + + public abstract TemplateToken Clone(Boolean omitSource); + + protected StringToken EvaluateStringToken( + TemplateContext context, + String expression, + out Int32 bytes) + { + var originalBytes = context.Memory.CurrentBytes; + try + { + var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions); + var options = new EvaluationOptions + { + MaxMemory = context.Memory.MaxBytes, + }; + var result = tree.Evaluate(context.TraceWriter.ToExpressionTraceWriter(), null, context, options); + + if (result.Raw is LiteralToken literalToken) + { + var stringToken = new StringToken(FileId, Line, Column, literalToken.ToString()); + context.Memory.AddBytes(stringToken); + return stringToken; + } + + if (!result.IsPrimitive) + { + context.Error(this, "Expected a string"); + return CreateStringToken(context, expression); + } + + var stringValue = result.Kind == ValueKind.Null ? 
String.Empty : result.ConvertToString(); + return CreateStringToken(context, stringValue); + } + finally + { + bytes = context.Memory.CurrentBytes - originalBytes; + } + } + + protected SequenceToken EvaluateSequenceToken( + TemplateContext context, + String expression, + out Int32 bytes) + { + var originalBytes = context.Memory.CurrentBytes; + try + { + var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions); + var options = new EvaluationOptions + { + MaxMemory = context.Memory.MaxBytes, + }; + var result = tree.Evaluate(context.TraceWriter.ToExpressionTraceWriter(), null, context, options); + var templateToken = ConvertToTemplateToken(context, result); + if (templateToken is SequenceToken sequence) + { + return sequence; + } + + context.Error(this, TemplateStrings.ExpectedSequence()); + return CreateSequenceToken(context); + } + finally + { + bytes = context.Memory.CurrentBytes - originalBytes; + } + } + + protected MappingToken EvaluateMappingToken( + TemplateContext context, + String expression, + out Int32 bytes) + { + var originalBytes = context.Memory.CurrentBytes; + try + { + var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions); + var options = new EvaluationOptions + { + MaxMemory = context.Memory.MaxBytes, + }; + var result = tree.Evaluate(context.TraceWriter.ToExpressionTraceWriter(), null, context, options); + var templateToken = ConvertToTemplateToken(context, result); + if (templateToken is MappingToken mapping) + { + return mapping; + } + + context.Error(this, TemplateStrings.ExpectedMapping()); + return CreateMappingToken(context); + } + finally + { + bytes = context.Memory.CurrentBytes - originalBytes; + } + } + + protected TemplateToken EvaluateTemplateToken( + TemplateContext context, + String expression, + out Int32 bytes) + { + var originalBytes = context.Memory.CurrentBytes; + try + { + var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions); + var options = new EvaluationOptions + { + MaxMemory = context.Memory.MaxBytes, + }; + var result = tree.Evaluate(context.TraceWriter.ToExpressionTraceWriter(), null, context, options); + return ConvertToTemplateToken(context, result); + } + finally + { + bytes = context.Memory.CurrentBytes - originalBytes; + } + } + + private TemplateToken ConvertToTemplateToken( + TemplateContext context, + EvaluationResult result) + { + // Literal + if (TryConvertToLiteralToken(context, result, out LiteralToken literal)) + { + return literal; + } + // Known raw types + else if (!Object.ReferenceEquals(result.Raw, null)) + { + if (result.Raw is SequenceToken sequence) + { + context.Memory.AddBytes(sequence, true); + return sequence; + } + else if (result.Raw is MappingToken mapping) + { + context.Memory.AddBytes(mapping, true); + return mapping; + } + } + + // Leverage the expression SDK to traverse the object + if (result.TryGetCollectionInterface(out Object collection)) + { + if (collection is IReadOnlyObject dictionary) + { + var mapping = CreateMappingToken(context); + + foreach (KeyValuePair pair in dictionary) + { + var keyToken = CreateStringToken(context, pair.Key); + var valueResult = EvaluationResult.CreateIntermediateResult(null, pair.Value); + var valueToken = ConvertToTemplateToken(context, valueResult); + mapping.Add(keyToken, valueToken); + } + + return mapping; + } + else if (collection is IReadOnlyArray list) + 
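// Illustrative note (editorial; not part of the original patch). Results that
// expose the expression SDK's collection interfaces are rebuilt as template
// tokens: an IReadOnlyObject becomes a MappingToken with StringToken keys (above),
// and an IReadOnlyArray becomes a SequenceToken (below), recursing into each
// nested value. Scalars were already handled by TryConvertToLiteralToken.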
{ + var sequence = CreateSequenceToken(context); + + foreach (var item in list) + { + var itemResult = EvaluationResult.CreateIntermediateResult(null, item); + var itemToken = ConvertToTemplateToken(context, itemResult); + sequence.Add(itemToken); + } + + return sequence; + } + } + + throw new ArgumentException(TemplateStrings.UnableToConvertToTemplateToken(result.Value?.GetType().FullName)); + } + + private Boolean TryConvertToLiteralToken( + TemplateContext context, + EvaluationResult result, + out LiteralToken literal) + { + if (result.Raw is LiteralToken literal2) + { + context.Memory.AddBytes(literal2); + literal = literal2; + return true; + } + + switch (result.Kind) + { + case ValueKind.Null: + literal = new NullToken(FileId, Line, Column); + break; + + case ValueKind.Boolean: + literal = new BooleanToken(FileId, Line, Column, (Boolean)result.Value); + break; + + case ValueKind.Number: + literal = new NumberToken(FileId, Line, Column, (Double)result.Value); + break; + + case ValueKind.String: + literal = new StringToken(FileId, Line, Column, (String)result.Value); + break; + + default: + literal = null; + return false; + } + + context.Memory.AddBytes(literal); + return true; + } + + private StringToken CreateStringToken( + TemplateContext context, + String value) + { + var result = new StringToken(FileId, Line, Column, value); + context.Memory.AddBytes(result); + return result; + } + + private SequenceToken CreateSequenceToken(TemplateContext context) + { + var result = new SequenceToken(FileId, Line, Column); + context.Memory.AddBytes(result); + return result; + } + + private MappingToken CreateMappingToken(TemplateContext context) + { + var result = new MappingToken(FileId, Line, Column); + context.Memory.AddBytes(result); + return result; + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TemplateTokenExtensions.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TemplateTokenExtensions.cs new file mode 100644 index 00000000000..7b368404e81 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TemplateTokenExtensions.cs @@ -0,0 +1,221 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + public static class TemplateTokenExtensions + { + internal static BooleanToken AssertBoolean( + this TemplateToken value, + string objectDescription) + { + if (value is BooleanToken booleanToken) + { + return booleanToken; + } + + throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(BooleanToken)}' was expected."); + } + + internal static NullToken AssertNull( + this TemplateToken value, + string objectDescription) + { + if (value is NullToken nullToken) + { + return nullToken; + } + + throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(NullToken)}' was expected."); + } + + internal static NumberToken AssertNumber( + this TemplateToken value, + string objectDescription) + { + if (value is NumberToken numberToken) + { + return numberToken; + } + + throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. 
The type '{nameof(NumberToken)}' was expected."); + } + + internal static StringToken AssertString( + this TemplateToken value, + string objectDescription) + { + if (value is StringToken stringToken) + { + return stringToken; + } + + throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(StringToken)}' was expected."); + } + + internal static MappingToken AssertMapping( + this TemplateToken value, + string objectDescription) + { + if (value is MappingToken mapping) + { + return mapping; + } + + throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(MappingToken)}' was expected."); + } + + internal static void AssertNotEmpty( + this MappingToken mapping, + string objectDescription) + { + if (mapping.Count == 0) + { + throw new ArgumentException($"Unexpected empty mapping when reading '{objectDescription}'"); + } + } + + internal static ScalarToken AssertScalar( + this TemplateToken value, + string objectDescription) + { + if (value is ScalarToken scalar) + { + return scalar; + } + + throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(ScalarToken)}' was expected."); + } + + internal static SequenceToken AssertSequence( + this TemplateToken value, + string objectDescription) + { + if (value is SequenceToken sequence) + { + return sequence; + } + + throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(SequenceToken)}' was expected."); + } + + internal static void AssertUnexpectedValue( + this LiteralToken literal, + string objectDescription) + { + throw new ArgumentException($"Error while reading '{objectDescription}'. 
Unexpected value '{literal.ToString()}'"); + } + + /// + /// Returns all tokens (depth first) + /// + public static IEnumerable Traverse(this TemplateToken token) + { + return Traverse(token, omitKeys: false); + } + + /// + /// Returns all tokens (depth first) + /// + public static IEnumerable Traverse( + this TemplateToken token, + bool omitKeys) + { + if (token != null) + { + yield return token; + + if (token is SequenceToken || token is MappingToken) + { + var state = new TraversalState(null, token); + while (state != null) + { + if (state.MoveNext(omitKeys)) + { + token = state.Current; + yield return token; + + if (token is SequenceToken || token is MappingToken) + { + state = new TraversalState(state, token); + } + } + else + { + state = state.Parent; + } + } + } + } + } + + private sealed class TraversalState + { + public TraversalState( + TraversalState parent, + TemplateToken token) + { + Parent = parent; + m_token = token; + } + + public bool MoveNext(bool omitKeys) + { + switch (m_token.Type) + { + case TokenType.Sequence: + var sequence = m_token as SequenceToken; + if (++m_index < sequence.Count) + { + Current = sequence[m_index]; + return true; + } + else + { + Current = null; + return false; + } + + case TokenType.Mapping: + var mapping = m_token as MappingToken; + + // Return the value + if (m_isKey) + { + m_isKey = false; + Current = mapping[m_index].Value; + return true; + } + + if (++m_index < mapping.Count) + { + // Skip the key, return the value + if (omitKeys) + { + m_isKey = false; + Current = mapping[m_index].Value; + return true; + } + + // Return the key + m_isKey = true; + Current = mapping[m_index].Key; + return true; + } + + Current = null; + return false; + + default: + throw new NotSupportedException($"Unexpected token type '{m_token.Type}'"); + } + } + + private TemplateToken m_token; + private int m_index = -1; + private bool m_isKey; + public TemplateToken Current; + public TraversalState Parent; + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TemplateTokenJsonConverter.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TemplateTokenJsonConverter.cs new file mode 100644 index 00000000000..1520942bed9 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TemplateTokenJsonConverter.cs @@ -0,0 +1,332 @@ +using System; +using System.Reflection; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + /// + /// JSON serializer for TemplateToken objects + /// + internal sealed class TemplateTokenJsonConverter : VssSecureJsonConverter + { + public override Boolean CanWrite + { + get + { + return true; + } + } + + public override Boolean CanConvert(Type objectType) + { + return typeof(TemplateToken).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + switch (reader.TokenType) + { + case JsonToken.String: + return new StringToken(null, null, null, reader.Value.ToString()); + case JsonToken.Boolean: + return new BooleanToken(null, null, null, (Boolean)reader.Value); + case JsonToken.Float: + return new NumberToken(null, null, null, (Double)reader.Value); + case JsonToken.Integer: + return new NumberToken(null, null, null, (Double)(Int64)reader.Value); + case JsonToken.Null: + return new NullToken(null, null, null); + case JsonToken.StartObject: + break; + default: + return null; + } + 
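// Illustrative note (editorial; not part of the original patch). Tokens without
// line/column information are serialized as plain JSON scalars (see WriteJson
// below), so ReadJson accepts both forms. For example, the expression text here
// being an arbitrary sample:
//
//   "build"                                  -> StringToken("build")
//   { "type": 3, "expr": "parameters.tool" } -> BasicExpressionToken
//
// A JSON object with no "type" property is treated as a string token; the
// default applied below is TokenType.String.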
+ Int32? type = null; + JObject value = JObject.Load(reader); + if (!value.TryGetValue("type", StringComparison.OrdinalIgnoreCase, out JToken typeValue)) + { + type = TokenType.String; + } + else if (typeValue.Type == JTokenType.Integer) + { + type = (Int32)typeValue; + } + else + { + return existingValue; + } + + Object newValue = null; + switch (type) + { + case TokenType.Null: + newValue = new NullToken(null, null, null); + break; + + case TokenType.Boolean: + newValue = new BooleanToken(null, null, null, default(Boolean)); + break; + + case TokenType.Number: + newValue = new NumberToken(null, null, null, default(Double)); + break; + + case TokenType.String: + newValue = new StringToken(null, null, null, null); + break; + + case TokenType.BasicExpression: + newValue = new BasicExpressionToken(null, null, null, null); + break; + + case TokenType.InsertExpression: + newValue = new InsertExpressionToken(null, null, null); + break; + + case TokenType.Sequence: + newValue = new SequenceToken(null, null, null); + break; + + case TokenType.Mapping: + newValue = new MappingToken(null, null, null); + break; + } + + if (value != null) + { + using (JsonReader objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, newValue); + } + } + + return newValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + base.WriteJson(writer, value, serializer); + if (value is TemplateToken token) + { + switch (token.Type) + { + case TokenType.Null: + if (token.Line == null && token.Column == null) + { + writer.WriteNull(); + } + else + { + writer.WriteStartObject(); + writer.WritePropertyName("type"); + writer.WriteValue(token.Type); + if (token.Line != null) + { + writer.WritePropertyName("line"); + writer.WriteValue(token.Line); + } + if (token.Line != null) + { + writer.WritePropertyName("col"); + writer.WriteValue(token.Column); + } + writer.WriteEndObject(); + } + return; + + case TokenType.Boolean: + var booleanToken = token as BooleanToken; + if (token.Line == null && token.Column == null) + { + writer.WriteValue(booleanToken.Value); + } + else + { + writer.WriteStartObject(); + writer.WritePropertyName("type"); + writer.WriteValue(token.Type); + if (token.Line != null) + { + writer.WritePropertyName("line"); + writer.WriteValue(token.Line); + } + if (token.Line != null) + { + writer.WritePropertyName("col"); + writer.WriteValue(token.Column); + } + writer.WritePropertyName("bool"); + writer.WriteValue(booleanToken.Value); + writer.WriteEndObject(); + } + return; + + case TokenType.Number: + var numberToken = token as NumberToken; + if (token.Line == null && token.Column == null) + { + writer.WriteValue(numberToken.Value); + } + else + { + writer.WriteStartObject(); + writer.WritePropertyName("type"); + writer.WriteValue(token.Type); + if (token.Line != null) + { + writer.WritePropertyName("line"); + writer.WriteValue(token.Line); + } + if (token.Line != null) + { + writer.WritePropertyName("col"); + writer.WriteValue(token.Column); + } + writer.WritePropertyName("num"); + writer.WriteValue(numberToken.Value); + writer.WriteEndObject(); + } + return; + + case TokenType.String: + var stringToken = token as StringToken; + if (token.Line == null && token.Column == null) + { + writer.WriteValue(stringToken.Value); + } + else + { + writer.WriteStartObject(); + writer.WritePropertyName("type"); + writer.WriteValue(token.Type); + if (token.Line != null) + { + writer.WritePropertyName("line"); + writer.WriteValue(token.Line); + } + 
if (token.Line != null) + { + writer.WritePropertyName("col"); + writer.WriteValue(token.Column); + } + writer.WritePropertyName("lit"); + writer.WriteValue(stringToken.Value); + writer.WriteEndObject(); + } + return; + + case TokenType.BasicExpression: + var basicExpressionToken = token as BasicExpressionToken; + writer.WriteStartObject(); + writer.WritePropertyName("type"); + writer.WriteValue(token.Type); + if (token.Line != null) + { + writer.WritePropertyName("line"); + writer.WriteValue(token.Line); + } + if (token.Line != null) + { + writer.WritePropertyName("col"); + writer.WriteValue(token.Column); + } + if (!String.IsNullOrEmpty(basicExpressionToken.Expression)) + { + writer.WritePropertyName("expr"); + writer.WriteValue(basicExpressionToken.Expression); + } + writer.WriteEndObject(); + return; + + case TokenType.InsertExpression: + var insertExpressionToken = token as InsertExpressionToken; + writer.WriteStartObject(); + writer.WritePropertyName("type"); + writer.WriteValue(token.Type); + if (token.Line != null) + { + writer.WritePropertyName("line"); + writer.WriteValue(token.Line); + } + if (token.Line != null) + { + writer.WritePropertyName("col"); + writer.WriteValue(token.Column); + } + writer.WritePropertyName("directive"); + writer.WriteValue(insertExpressionToken.Directive); + writer.WriteEndObject(); + return; + + case TokenType.Sequence: + var sequenceToken = token as SequenceToken; + writer.WriteStartObject(); + writer.WritePropertyName("type"); + writer.WriteValue(token.Type); + if (token.Line != null) + { + writer.WritePropertyName("line"); + writer.WriteValue(token.Line); + } + if (token.Line != null) + { + writer.WritePropertyName("col"); + writer.WriteValue(token.Column); + } + if (sequenceToken.Count > 0) + { + writer.WritePropertyName("seq"); + writer.WriteStartArray(); + foreach (var item in sequenceToken) + { + serializer.Serialize(writer, item); + } + writer.WriteEndArray(); + } + writer.WriteEndObject(); + return; + + case TokenType.Mapping: + var mappingToken = token as MappingToken; + writer.WriteStartObject(); + writer.WritePropertyName("type"); + writer.WriteValue(token.Type); + if (token.Line != null) + { + writer.WritePropertyName("line"); + writer.WriteValue(token.Line); + } + if (token.Line != null) + { + writer.WritePropertyName("col"); + writer.WriteValue(token.Column); + } + if (mappingToken.Count > 0) + { + writer.WritePropertyName("map"); + writer.WriteStartArray(); + foreach (var item in mappingToken) + { + serializer.Serialize(writer, item); + } + writer.WriteEndArray(); + } + writer.WriteEndObject(); + return; + } + } + + throw new NotSupportedException($"Unexpected type '{value?.GetType().FullName}' when serializing template token"); + } + } +} diff --git a/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TokenType.cs b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TokenType.cs new file mode 100644 index 00000000000..932b9de6055 --- /dev/null +++ b/src/Sdk/DTObjectTemplating/ObjectTemplating/Tokens/TokenType.cs @@ -0,0 +1,23 @@ +using System; + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ + internal static class TokenType + { + internal const Int32 String = 0; + + internal const Int32 Sequence = 1; + + internal const Int32 Mapping = 2; + + internal const Int32 BasicExpression = 3; + + internal const Int32 InsertExpression = 4; + + internal const Int32 Boolean = 5; + + internal const Int32 Number = 6; + + internal const Int32 Null = 7; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ActionStep.cs 
b/src/Sdk/DTPipelines/Pipelines/ActionStep.cs new file mode 100644 index 00000000000..ab152d67a67 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ActionStep.cs @@ -0,0 +1,61 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class ActionStep : JobStep + { + [JsonConstructor] + public ActionStep() + { + } + + private ActionStep(ActionStep actionToClone) + : base(actionToClone) + { + this.Reference = actionToClone.Reference?.Clone(); + + Environment = actionToClone.Environment?.Clone(); + Inputs = actionToClone.Inputs?.Clone(); + ContextName = actionToClone?.ContextName; + ScopeName = actionToClone?.ScopeName; + DisplayNameToken = actionToClone.DisplayNameToken?.Clone(); + } + + public override StepType Type => StepType.Action; + + [DataMember] + public ActionStepDefinitionReference Reference + { + get; + set; + } + + // TODO: After TFS and legacy phases/steps/ect are removed, lets replace the DisplayName in the base class with this value and remove this additional prop + [DataMember(EmitDefaultValue = false)] + public TemplateToken DisplayNameToken { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String ScopeName { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String ContextName { get; set; } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken Environment { get; set; } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken Inputs { get; set; } + + public override Step Clone() + { + return new ActionStep(this); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ActionStepDefinitionReference.cs b/src/Sdk/DTPipelines/Pipelines/ActionStepDefinitionReference.cs new file mode 100644 index 00000000000..eb089d33fa6 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ActionStepDefinitionReference.cs @@ -0,0 +1,153 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public enum ActionSourceType + { + [DataMember] + Repository = 1, + + [DataMember] + ContainerRegistry = 2, + + [DataMember] + Script = 3 + } + + [DataContract] + [KnownType(typeof(ContainerRegistryReference))] + [KnownType(typeof(RepositoryPathReference))] + [KnownType(typeof(ScriptReference))] + [JsonConverter(typeof(ActionStepDefinitionReferenceConverter))] + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class ActionStepDefinitionReference + { + [DataMember(EmitDefaultValue = false)] + public abstract ActionSourceType Type { get; } + + public abstract ActionStepDefinitionReference Clone(); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class ContainerRegistryReference : ActionStepDefinitionReference + { + [JsonConstructor] + public ContainerRegistryReference() + { + } + + private ContainerRegistryReference(ContainerRegistryReference referenceToClone) + { + this.Image = referenceToClone.Image; + } + + [DataMember(EmitDefaultValue = false)] + public override ActionSourceType Type => ActionSourceType.ContainerRegistry; + + /// + /// Container image + /// + [DataMember(EmitDefaultValue = false)] + public string Image + { + get; + set; + } + + public override ActionStepDefinitionReference Clone() + { + return 
new ContainerRegistryReference(this); + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class RepositoryPathReference : ActionStepDefinitionReference + { + [JsonConstructor] + public RepositoryPathReference() + { + } + + private RepositoryPathReference(RepositoryPathReference referenceToClone) + { + this.Name = referenceToClone.Name; + this.Ref = referenceToClone.Ref; + this.RepositoryType = referenceToClone.RepositoryType; + this.Path = referenceToClone.Path; + } + + [DataMember(EmitDefaultValue = false)] + public override ActionSourceType Type => ActionSourceType.Repository; + + /// + /// Repository name + /// + [DataMember(EmitDefaultValue = false)] + public string Name + { + get; + set; + } + + /// + /// Repository ref, branch/tag/commit + /// + [DataMember(EmitDefaultValue = false)] + public string Ref + { + get; + set; + } + + /// + /// Repository type, github/AzureRepo/etc + /// + [DataMember(EmitDefaultValue = false)] + public string RepositoryType + { + get; + set; + } + + /// + /// Path to action entry point directory + /// + [DataMember(EmitDefaultValue = false)] + public string Path + { + get; + set; + } + + public override ActionStepDefinitionReference Clone() + { + return new RepositoryPathReference(this); + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class ScriptReference : ActionStepDefinitionReference + { + [JsonConstructor] + public ScriptReference() + { + } + + private ScriptReference(ScriptReference referenceToClone) + { + } + + [DataMember(EmitDefaultValue = false)] + public override ActionSourceType Type => ActionSourceType.Script; + + public override ActionStepDefinitionReference Clone() + { + return new ScriptReference(this); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ActionStepDefinitionReferenceConverter.cs b/src/Sdk/DTPipelines/Pipelines/ActionStepDefinitionReferenceConverter.cs new file mode 100644 index 00000000000..49431ad43c3 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ActionStepDefinitionReferenceConverter.cs @@ -0,0 +1,82 @@ +using System; +using System.Reflection; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + internal sealed class ActionStepDefinitionReferenceConverter : VssSecureJsonConverter + { + public override bool CanWrite + { + get + { + return false; + } + } + + public override bool CanConvert(Type objectType) + { + return typeof(Step).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + JObject value = JObject.Load(reader); + if (value.TryGetValue("Type", StringComparison.OrdinalIgnoreCase, out JToken actionTypeValue)) + { + ActionSourceType actionType; + if (actionTypeValue.Type == JTokenType.Integer) + { + actionType = (ActionSourceType)(Int32)actionTypeValue; + } + else if (actionTypeValue.Type != JTokenType.String || !Enum.TryParse((String)actionTypeValue, true, out actionType)) + { + return null; + } + + ActionStepDefinitionReference reference = null; + switch (actionType) + { + case ActionSourceType.Repository: + reference = new RepositoryPathReference(); + break; + + case ActionSourceType.ContainerRegistry: + reference = new ContainerRegistryReference(); + break; + + case ActionSourceType.Script: + reference = new ScriptReference(); + break; + } + + using (var objectReader = 
value.CreateReader()) + { + serializer.Populate(objectReader, reference); + } + + return reference; + } + else + { + return null; + } + } + + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessage.cs b/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessage.cs new file mode 100644 index 00000000000..130b85293d1 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessage.cs @@ -0,0 +1,397 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class AgentJobRequestMessage + { + [JsonConstructor] + internal AgentJobRequestMessage() + { + } + + /// + /// Job request message sent to the runner + /// + /// Hierarchy of environment variables to overlay, last wins. + public AgentJobRequestMessage( + TaskOrchestrationPlanReference plan, + TimelineReference timeline, + Guid jobId, + String jobDisplayName, + String jobName, + TemplateToken jobContainer, + TemplateToken jobServiceContainers, + IList environmentVariables, + IDictionary variables, + IList maskHints, + JobResources jobResources, + DictionaryContextData contextData, + WorkspaceOptions workspaceOptions, + IEnumerable steps, + IEnumerable scopes) + { + this.MessageType = JobRequestMessageTypes.PipelineAgentJobRequest; + this.Plan = plan; + this.JobId = jobId; + this.JobDisplayName = jobDisplayName; + this.JobName = jobName; + this.JobContainer = jobContainer; + this.JobServiceContainers = jobServiceContainers; + this.Timeline = timeline; + this.Resources = jobResources; + this.Workspace = workspaceOptions; + + m_variables = new Dictionary(variables, StringComparer.OrdinalIgnoreCase); + m_maskHints = new List(maskHints); + m_steps = new List(steps); + + if (scopes != null) + { + m_scopes = new List(scopes); + } + + if (environmentVariables?.Count > 0) + { + m_environmentVariables = new List(environmentVariables); + } + + this.ContextData = new Dictionary(StringComparer.OrdinalIgnoreCase); + if (contextData?.Count > 0) + { + foreach (var pair in contextData) + { + this.ContextData[pair.Key] = pair.Value; + } + } + } + + [DataMember] + public String MessageType + { + get; + private set; + } + + [DataMember] + public TaskOrchestrationPlanReference Plan + { + get; + private set; + } + + [DataMember] + public TimelineReference Timeline + { + get; + private set; + } + + [DataMember] + public Guid JobId + { + get; + private set; + } + + [DataMember] + public String JobDisplayName + { + get; + private set; + } + + [DataMember] + public String JobName + { + get; + private set; + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken JobContainer + { + get; + private set; + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken JobServiceContainers + { + get; + private set; + } + + [DataMember] + public Int64 RequestId + { + get; + internal set; + } + + [DataMember] + public DateTime LockedUntil + { + get; + internal set; + } + + [DataMember] + public JobResources Resources + { + get; + private set; + } + + [DataMember(EmitDefaultValue = false)] + 
[EditorBrowsable(EditorBrowsableState.Never)] + public IDictionary ContextData + { + get; + private set; + } + + [DataMember(EmitDefaultValue = false)] + public WorkspaceOptions Workspace + { + get; + private set; + } + + /// + /// Gets the collection of mask hints + /// + public List MaskHints + { + get + { + if (m_maskHints == null) + { + m_maskHints = new List(); + } + return m_maskHints; + } + } + + /// + /// Gets the hierarchy of environment variables to overlay, last wins. + /// + public IList EnvironmentVariables + { + get + { + if (m_environmentVariables == null) + { + m_environmentVariables = new List(); + } + return m_environmentVariables; + } + } + + /// + /// Gets the collection of variables associated with the current context. + /// + public IDictionary Variables + { + get + { + if (m_variables == null) + { + m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_variables; + } + } + + public IList Steps + { + get + { + if (m_steps == null) + { + m_steps = new List(); + } + return m_steps; + } + } + + public IList Scopes + { + get + { + if (m_scopes == null) + { + m_scopes = new List(); + } + return m_scopes; + } + } + + // todo: remove after feature-flag DistributedTask.EvaluateContainerOnRunner is enabled everywhere + public void SetJobSidecarContainers(IDictionary value) + { + m_jobSidecarContainers = value; + } + + public TaskAgentMessage GetAgentMessage() + { + var body = JsonUtility.ToString(this); + + return new TaskAgentMessage + { + Body = body, + MessageType = JobRequestMessageTypes.PipelineAgentJobRequest + }; + } + + // todo: remove after feature-flag DistributedTask.EvaluateContainerOnRunner is enabled everywhere + internal static TemplateToken ConvertToTemplateToken(ContainerResource resource) + { + var result = new MappingToken(null, null, null); + + var image = resource.Image; + if (!string.IsNullOrEmpty(image)) + { + result.Add(new StringToken(null, null, null, "image"), new StringToken(null, null, null, image)); + } + + var options = resource.Options; + if (!string.IsNullOrEmpty(options)) + { + result.Add(new StringToken(null, null, null, "options"), new StringToken(null, null, null, options)); + } + + var environment = resource.Environment; + if (environment?.Count > 0) + { + var mapping = new MappingToken(null, null, null); + foreach (var pair in environment) + { + mapping.Add(new StringToken(null, null, null, pair.Key), new StringToken(null, null, null, pair.Value)); + } + result.Add(new StringToken(null, null, null, "env"), mapping); + } + + var ports = resource.Ports; + if (ports?.Count > 0) + { + var sequence = new SequenceToken(null, null, null); + foreach (var item in ports) + { + sequence.Add(new StringToken(null, null, null, item)); + } + result.Add(new StringToken(null, null, null, "ports"), sequence); + } + + var volumes = resource.Volumes; + if (volumes?.Count > 0) + { + var sequence = new SequenceToken(null, null, null); + foreach (var item in volumes) + { + sequence.Add(new StringToken(null, null, null, item)); + } + result.Add(new StringToken(null, null, null, "volumes"), sequence); + } + + return result; + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + // todo: remove after feature-flag DistributedTask.EvaluateContainerOnRunner is enabled everywhere + if (JobContainer is StringToken jobContainerStringToken) + { + var resourceAlias = jobContainerStringToken.Value; + var resource = Resources?.Containers.SingleOrDefault(x => string.Equals(x.Alias, resourceAlias, 
StringComparison.OrdinalIgnoreCase)); + if (resource != null) + { + JobContainer = ConvertToTemplateToken(resource); + m_jobContainerResourceAlias = resourceAlias; + } + } + + // todo: remove after feature-flag DistributedTask.EvaluateContainerOnRunner is enabled everywhere + if (m_jobSidecarContainers?.Count > 0 && (JobServiceContainers == null || JobServiceContainers.Type == TokenType.Null)) + { + var services = new MappingToken(null, null, null); + foreach (var pair in m_jobSidecarContainers) + { + var networkAlias = pair.Key; + var serviceResourceAlias = pair.Value; + var serviceResource = Resources.Containers.Single(x => string.Equals(x.Alias, serviceResourceAlias, StringComparison.OrdinalIgnoreCase)); + services.Add(new StringToken(null, null, null, networkAlias), ConvertToTemplateToken(serviceResource)); + } + JobServiceContainers = services; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_environmentVariables?.Count == 0) + { + m_environmentVariables = null; + } + + if (m_maskHints?.Count == 0) + { + m_maskHints = null; + } + else if (m_maskHints != null) + { + m_maskHints = new List(this.m_maskHints.Distinct()); + } + + if (m_scopes?.Count == 0) + { + m_scopes = null; + } + + if (m_variables?.Count == 0) + { + m_variables = null; + } + + // todo: remove after feature-flag DistributedTask.EvaluateContainerOnRunner is enabled everywhere + if (!string.IsNullOrEmpty(m_jobContainerResourceAlias)) + { + JobContainer = new StringToken(null, null, null, m_jobContainerResourceAlias); + } + } + + [DataMember(Name = "EnvironmentVariables", EmitDefaultValue = false)] + private List m_environmentVariables; + + [DataMember(Name = "Mask", EmitDefaultValue = false)] + private List m_maskHints; + + [DataMember(Name = "Steps", EmitDefaultValue = false)] + private List m_steps; + + [DataMember(Name = "Scopes", EmitDefaultValue = false)] + private List m_scopes; + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private IDictionary m_variables; + + // todo: remove after feature-flag DistributedTask.EvaluateContainerOnRunner is enabled everywhere + [DataMember(Name = "JobSidecarContainers", EmitDefaultValue = false)] + private IDictionary m_jobSidecarContainers; + + // todo: remove after feature-flag DistributedTask.EvaluateContainerOnRunner is enabled everywhere + [IgnoreDataMember] + private string m_jobContainerResourceAlias; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessageUtil.cs b/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessageUtil.cs new file mode 100644 index 00000000000..a6d7fe489a8 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/AgentJobRequestMessageUtil.cs @@ -0,0 +1,769 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; +using System.Text.RegularExpressions; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class AgentJobRequestMessageUtil + { + // Legacy JobRequestMessage -> Pipeline JobRequestMessage + // Used by the agent when the latest version agent connect to old version TFS + // Used by the server when common method only take the new Message contact, like, telemetry logging + public static AgentJobRequestMessage Convert(WebApi.AgentJobRequestMessage message) + { + // construct steps + List jobSteps = new List(); + foreach (var task in message.Tasks) + { + TaskStep taskStep = 
new TaskStep(task); + jobSteps.Add(taskStep); + } + + Dictionary variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + HashSet maskHints = new HashSet(); + JobResources jobResources = new JobResources(); + WorkspaceOptions workspace = new WorkspaceOptions(); + message.Environment.Extract(variables, maskHints, jobResources); + + // convert repository endpoint into checkout task for Build + if (string.Equals(message.Plan.PlanType, "Build", StringComparison.OrdinalIgnoreCase)) + { + // repositoryId was added sometime after TFS2015, so we need to fall back to find endpoint using endpoint type. + var legacyRepoEndpoint = jobResources.Endpoints.FirstOrDefault(x => x.Data.ContainsKey("repositoryId")); + if (legacyRepoEndpoint == null) + { + legacyRepoEndpoint = jobResources.Endpoints.FirstOrDefault(x => x.Type == LegacyRepositoryTypes.Bitbucket || x.Type == LegacyRepositoryTypes.Git || x.Type == LegacyRepositoryTypes.TfsGit || x.Type == LegacyRepositoryTypes.GitHub || x.Type == LegacyRepositoryTypes.GitHubEnterprise || x.Type == LegacyRepositoryTypes.TfsVersionControl); + } + + // build retention job will not have a repo endpoint. + if (legacyRepoEndpoint != null) + { + // construct checkout task + var checkoutStep = new TaskStep(); + checkoutStep.Id = Guid.NewGuid(); + checkoutStep.DisplayName = PipelineConstants.CheckoutTask.FriendlyName; + checkoutStep.Name = "__system_checkout"; + checkoutStep.Reference = new TaskStepDefinitionReference() + { + Id = PipelineConstants.CheckoutTask.Id, + Name = PipelineConstants.CheckoutTask.Name, + Version = PipelineConstants.CheckoutTask.Version, + }; + checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.Repository] = "__legacy_repo_endpoint"; + + // construct self repository resource + var defaultRepo = new RepositoryResource(); + defaultRepo.Alias = "__legacy_repo_endpoint"; + defaultRepo.Properties.Set(RepositoryPropertyNames.Name, legacyRepoEndpoint.Name); + legacyRepoEndpoint.Data.TryGetValue("repositoryId", out string repositoryId); + if (!string.IsNullOrEmpty(repositoryId)) + { + defaultRepo.Id = repositoryId; + } + else + { + defaultRepo.Id = "__legacy_repo_endpoint"; + } + + defaultRepo.Endpoint = new ServiceEndpointReference() + { + Id = Guid.Empty, + Name = legacyRepoEndpoint.Name + }; + defaultRepo.Type = ConvertLegacySourceType(legacyRepoEndpoint.Type); + defaultRepo.Url = legacyRepoEndpoint.Url; + if (variables.TryGetValue("build.sourceVersion", out VariableValue sourceVersion) && !string.IsNullOrEmpty(sourceVersion?.Value)) + { + defaultRepo.Version = sourceVersion.Value; + } + if (variables.TryGetValue("build.sourceBranch", out VariableValue sourceBranch) && !string.IsNullOrEmpty(sourceBranch?.Value)) + { + defaultRepo.Properties.Set(RepositoryPropertyNames.Ref, sourceBranch.Value); + } + + VersionInfo versionInfo = null; + if (variables.TryGetValue("build.sourceVersionAuthor", out VariableValue sourceAuthor) && !string.IsNullOrEmpty(sourceAuthor?.Value)) + { + versionInfo = new VersionInfo(); + versionInfo.Author = sourceAuthor.Value; + } + if (variables.TryGetValue("build.sourceVersionMessage", out VariableValue sourceMessage) && !string.IsNullOrEmpty(sourceMessage?.Value)) + { + if (versionInfo == null) + { + versionInfo = new VersionInfo(); + } + versionInfo.Message = sourceMessage.Value; + } + if (versionInfo != null) + { + defaultRepo.Properties.Set(RepositoryPropertyNames.VersionInfo, versionInfo); + } + + if (defaultRepo.Type == RepositoryTypes.Tfvc) + { + if (variables.TryGetValue("build.sourceTfvcShelveset", out 
VariableValue shelveset) && !string.IsNullOrEmpty(shelveset?.Value)) + { + defaultRepo.Properties.Set(RepositoryPropertyNames.Shelveset, shelveset.Value); + } + + var legacyTfvcMappingJson = legacyRepoEndpoint.Data["tfvcWorkspaceMapping"]; + var legacyTfvcMapping = JsonUtility.FromString(legacyTfvcMappingJson); + if (legacyTfvcMapping != null) + { + IList tfvcMapping = new List(); + foreach (var mapping in legacyTfvcMapping.Mappings) + { + tfvcMapping.Add(new WorkspaceMapping() { ServerPath = mapping.ServerPath, LocalPath = mapping.LocalPath, Exclude = String.Equals(mapping.MappingType, "cloak", StringComparison.OrdinalIgnoreCase) }); + } + + defaultRepo.Properties.Set>(RepositoryPropertyNames.Mappings, tfvcMapping); + } + } + else if (defaultRepo.Type == RepositoryTypes.Svn) + { + var legacySvnMappingJson = legacyRepoEndpoint.Data["svnWorkspaceMapping"]; + var legacySvnMapping = JsonUtility.FromString(legacySvnMappingJson); + if (legacySvnMapping != null) + { + IList svnMapping = new List(); + foreach (var mapping in legacySvnMapping.Mappings) + { + svnMapping.Add(new WorkspaceMapping() { ServerPath = mapping.ServerPath, LocalPath = mapping.LocalPath, Depth = mapping.Depth, IgnoreExternals = mapping.IgnoreExternals, Revision = mapping.Revision }); + } + + defaultRepo.Properties.Set>(RepositoryPropertyNames.Mappings, svnMapping); + } + } + + legacyRepoEndpoint.Data.TryGetValue("clean", out string cleanString); + if (!string.IsNullOrEmpty(cleanString)) + { + checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.Clean] = cleanString; + } + else + { + // Checkout task has clean set tp false as default. + checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.Clean] = Boolean.FalseString; + } + + if (legacyRepoEndpoint.Data.TryGetValue("checkoutSubmodules", out string checkoutSubmodulesString) && + Boolean.TryParse(checkoutSubmodulesString, out Boolean checkoutSubmodules) && + checkoutSubmodules) + { + if (legacyRepoEndpoint.Data.TryGetValue("checkoutNestedSubmodules", out string nestedSubmodulesString) && + Boolean.TryParse(nestedSubmodulesString, out Boolean nestedSubmodules) && + nestedSubmodules) + { + checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.Submodules] = PipelineConstants.CheckoutTaskInputs.SubmodulesOptions.Recursive; + } + else + { + checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.Submodules] = PipelineConstants.CheckoutTaskInputs.SubmodulesOptions.True; + } + } + + if (legacyRepoEndpoint.Data.ContainsKey("fetchDepth")) + { + checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.FetchDepth] = legacyRepoEndpoint.Data["fetchDepth"]; + } + + if (legacyRepoEndpoint.Data.ContainsKey("gitLfsSupport")) + { + checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.Lfs] = legacyRepoEndpoint.Data["gitLfsSupport"]; + } + + if (VariableUtility.GetEnableAccessTokenType(variables) == EnableAccessTokenType.Variable) + { + checkoutStep.Inputs[PipelineConstants.CheckoutTaskInputs.PersistCredentials] = Boolean.TrueString; + } + + // construct worksapce option + if (Boolean.TryParse(cleanString, out Boolean clean) && clean) + { + if (legacyRepoEndpoint.Data.TryGetValue("cleanOptions", out string cleanOptionsString) && !string.IsNullOrEmpty(cleanOptionsString)) + { + if (string.Equals(cleanOptionsString, "1", StringComparison.OrdinalIgnoreCase)) //RepositoryCleanOptions.SourceAndOutputDir + { + workspace.Clean = PipelineConstants.WorkspaceCleanOptions.Outputs; + } + else if (string.Equals(cleanOptionsString, "2", StringComparison.OrdinalIgnoreCase)) 
//RepositoryCleanOptions.SourceDir + { + workspace.Clean = PipelineConstants.WorkspaceCleanOptions.Resources; + } + else if (string.Equals(cleanOptionsString, "3", StringComparison.OrdinalIgnoreCase)) //RepositoryCleanOptions.AllBuildDir + { + workspace.Clean = PipelineConstants.WorkspaceCleanOptions.All; + } + } + } + + // add checkout task when build.syncsources and skipSyncSource not set + variables.TryGetValue("build.syncSources", out VariableValue syncSourcesVariable); + legacyRepoEndpoint.Data.TryGetValue("skipSyncSource", out string skipSyncSource); + if (!string.IsNullOrEmpty(syncSourcesVariable?.Value) && Boolean.TryParse(syncSourcesVariable?.Value, out bool syncSource) && !syncSource) + { + checkoutStep.Condition = bool.FalseString; + } + else if (Boolean.TryParse(skipSyncSource, out bool skipSource) && skipSource) + { + checkoutStep.Condition = bool.FalseString; + } + + jobSteps.Insert(0, checkoutStep); + + // always add self repository to job resource + jobResources.Repositories.Add(defaultRepo); + } + } + + AgentJobRequestMessage agentRequestMessage = new AgentJobRequestMessage(message.Plan, message.Timeline, message.JobId, message.JobName, message.JobRefName, null, null, null, variables, maskHints.ToList(), jobResources, null, workspace, jobSteps, null) + { + RequestId = message.RequestId + }; + + return agentRequestMessage; + } + + // Pipeline JobRequestMessage -> Legacy JobRequestMessage + // Used by the server when the connected agent is old version and doesn't support new contract yet. + public static WebApi.AgentJobRequestMessage Convert(AgentJobRequestMessage message) + { + // Old agent can't handle container(s) + if (message.JobContainer != null) + { + throw new NotSupportedException("Job containers are not supported"); + } + if (message.JobServiceContainers != null) + { + throw new NotSupportedException("Job service containers are not supported"); + } + + // Old agent can't handle more than 1 repository + if (message.Resources.Repositories.Count > 1) + { + throw new NotSupportedException(string.Join(", ", message.Resources.Repositories.Select(x => x.Alias))); + } + + // Old agent can't handle more than 1 checkout task + if (message.Steps.Where(x => x.IsCheckoutTask()).Count() > 1) + { + throw new NotSupportedException(PipelineConstants.CheckoutTask.Id.ToString("D")); + } + + // construct tasks + List tasks = new List(); + foreach (var step in message.Steps) + { + // Pipeline builder should add min agent demand when steps contains group + if (step.Type != StepType.Task) + { + throw new NotSupportedException(step.Type.ToString()); + } + + // don't add checkout task, we need to convert the checkout task into endpoint + if (!step.IsCheckoutTask()) + { + TaskInstance task = (step as TaskStep).ToLegacyTaskInstance(); + tasks.Add(task); + } + } + + if (message.Resources != null) + { + foreach (var endpoint in message.Resources.Endpoints) + { + // Legacy message require all endpoint's name equals to endpoint's id + // Guid.Empty is for repository endpoints + if (!String.Equals(endpoint.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase) && + endpoint.Id != Guid.Empty) + { + endpoint.Name = endpoint.Id.ToString("D"); + } + } + + // Make sure we propagate download ticket into the mask hints + foreach (var secureFile in message.Resources.SecureFiles) + { + if (!String.IsNullOrEmpty(secureFile.Ticket)) + { + message.MaskHints.Add(new MaskHint() { Type = MaskType.Regex, Value = Regex.Escape(secureFile.Ticket) }); + } + } + } + + if 
(String.Equals(message.Plan.PlanType, "Build", StringComparison.OrdinalIgnoreCase)) + { + // create repository endpoint base on checkout task + repository resource + repository endpoint + // repoResource might be null when environment verion is still on 1 + var repoResource = message.Resources?.Repositories.SingleOrDefault(); + if (repoResource != null) + { + var legacyRepoEndpoint = new ServiceEndpoint(); + legacyRepoEndpoint.Name = repoResource.Properties.Get(RepositoryPropertyNames.Name); + legacyRepoEndpoint.Type = ConvertToLegacySourceType(repoResource.Type); + legacyRepoEndpoint.Url = repoResource.Url; + if (repoResource.Endpoint != null) + { + var referencedEndpoint = message.Resources.Endpoints.First(x => (x.Id == repoResource.Endpoint.Id && x.Id != Guid.Empty) || (String.Equals(x.Name, repoResource.Endpoint.Name?.Literal, StringComparison.OrdinalIgnoreCase) && x.Id == Guid.Empty && repoResource.Endpoint.Id == Guid.Empty)); + var endpointAuthCopy = referencedEndpoint.Authorization?.Clone(); + if (endpointAuthCopy != null) + { + if (endpointAuthCopy.Scheme == EndpointAuthorizationSchemes.Token) //InstallationToken (Tabby) or ApiToken (GithubEnterprise) + { + if (referencedEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out string accessToken)) //Tabby + { + legacyRepoEndpoint.Authorization = new EndpointAuthorization() + { + Scheme = EndpointAuthorizationSchemes.UsernamePassword, + Parameters = + { + { EndpointAuthorizationParameters.Username, "x-access-token" }, + { EndpointAuthorizationParameters.Password, accessToken } + } + }; + } + else if (referencedEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.ApiToken, out string apiToken)) //GithubEnterprise + { + legacyRepoEndpoint.Authorization = new EndpointAuthorization() + { + Scheme = EndpointAuthorizationSchemes.UsernamePassword, + Parameters = + { + { EndpointAuthorizationParameters.Username, apiToken }, + { EndpointAuthorizationParameters.Password, "x-oauth-basic" } + } + }; + } + } + else if (endpointAuthCopy.Scheme == EndpointAuthorizationSchemes.PersonalAccessToken) // Github + { + if (referencedEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out string accessToken)) //Tabby + { + legacyRepoEndpoint.Authorization = new EndpointAuthorization() + { + Scheme = EndpointAuthorizationSchemes.UsernamePassword, + Parameters = + { + { EndpointAuthorizationParameters.Username, "pat" }, + { EndpointAuthorizationParameters.Password, accessToken } + } + }; + } + } + else + { + legacyRepoEndpoint.Authorization = endpointAuthCopy; + } + } + + // there are 2 properties we put into the legacy repo endpoint directly from connect endpoint + if (referencedEndpoint.Data.TryGetValue("acceptUntrustedCerts", out String acceptUntrustedCerts)) + { + legacyRepoEndpoint.Data["acceptUntrustedCerts"] = acceptUntrustedCerts; + } + if (referencedEndpoint.Data.TryGetValue("realmName", out String realmName)) + { + legacyRepoEndpoint.Data["realmName"] = realmName; + } + } + legacyRepoEndpoint.Data["repositoryId"] = repoResource.Id; + + // default values in the old message format + legacyRepoEndpoint.Data["clean"] = Boolean.FalseString; + legacyRepoEndpoint.Data["checkoutSubmodules"] = Boolean.FalseString; + legacyRepoEndpoint.Data["checkoutNestedSubmodules"] = Boolean.FalseString; + legacyRepoEndpoint.Data["fetchDepth"] = "0"; + legacyRepoEndpoint.Data["gitLfsSupport"] = Boolean.FalseString; + legacyRepoEndpoint.Data["skipSyncSource"] = 
Boolean.FalseString; + legacyRepoEndpoint.Data["cleanOptions"] = "0"; + legacyRepoEndpoint.Data["rootFolder"] = null; // old tfvc repo endpoint has this set to $/foo, but it doesn't seems to be used at all. + + if (repoResource.Type == RepositoryTypes.Tfvc) + { + var tfvcMapping = repoResource.Properties.Get>(RepositoryPropertyNames.Mappings); + if (tfvcMapping != null) + { + LegacyBuildWorkspace legacyMapping = new LegacyBuildWorkspace(); + foreach (var mapping in tfvcMapping) + { + legacyMapping.Mappings.Add(new LegacyMappingDetails() { ServerPath = mapping.ServerPath, LocalPath = mapping.LocalPath, MappingType = mapping.Exclude ? "cloak" : "map" }); + } + + legacyRepoEndpoint.Data["tfvcWorkspaceMapping"] = JsonUtility.ToString(legacyMapping); + } + } + else if (repoResource.Type == RepositoryTypes.Svn) + { + var svnMapping = repoResource.Properties.Get>(RepositoryPropertyNames.Mappings); + if (svnMapping != null) + { + LegacySvnWorkspace legacyMapping = new LegacySvnWorkspace(); + foreach (var mapping in svnMapping) + { + legacyMapping.Mappings.Add(new LegacySvnMappingDetails() { ServerPath = mapping.ServerPath, LocalPath = mapping.LocalPath, Depth = mapping.Depth, IgnoreExternals = mapping.IgnoreExternals, Revision = mapping.Revision }); + } + + legacyRepoEndpoint.Data["svnWorkspaceMapping"] = JsonUtility.ToString(legacyMapping); + } + } + else if (repoResource.Type == RepositoryTypes.Git) + { + if (message.Variables.TryGetValue(WellKnownDistributedTaskVariables.ServerType, out VariableValue serverType) && String.Equals(serverType?.Value, "Hosted", StringComparison.OrdinalIgnoreCase)) + { + legacyRepoEndpoint.Data["onpremtfsgit"] = Boolean.FalseString; + } + else + { + legacyRepoEndpoint.Data["onpremtfsgit"] = Boolean.TrueString; + } + } + + if (!message.Variables.ContainsKey("build.repository.id") || String.IsNullOrEmpty(message.Variables["build.repository.id"]?.Value)) + { + message.Variables["build.repository.id"] = repoResource.Id; + } + if (!message.Variables.ContainsKey("build.repository.name") || String.IsNullOrEmpty(message.Variables["build.repository.name"]?.Value)) + { + message.Variables["build.repository.name"] = repoResource.Properties.Get(RepositoryPropertyNames.Name); + } + if (!message.Variables.ContainsKey("build.repository.uri") || String.IsNullOrEmpty(message.Variables["build.repository.uri"]?.Value)) + { + message.Variables["build.repository.uri"] = repoResource.Url.AbsoluteUri; + } + + var versionInfo = repoResource.Properties.Get(RepositoryPropertyNames.VersionInfo); + if (!message.Variables.ContainsKey("build.sourceVersionAuthor") || String.IsNullOrEmpty(message.Variables["build.sourceVersionAuthor"]?.Value)) + { + message.Variables["build.sourceVersionAuthor"] = versionInfo?.Author; + } + if (!message.Variables.ContainsKey("build.sourceVersionMessage") || String.IsNullOrEmpty(message.Variables["build.sourceVersionMessage"]?.Value)) + { + message.Variables["build.sourceVersionMessage"] = versionInfo?.Message; + } + if (!message.Variables.ContainsKey("build.sourceVersion") || String.IsNullOrEmpty(message.Variables["build.sourceVersion"]?.Value)) + { + message.Variables["build.sourceVersion"] = repoResource.Version; + } + if (!message.Variables.ContainsKey("build.sourceBranch") || String.IsNullOrEmpty(message.Variables["build.sourceBranch"]?.Value)) + { + message.Variables["build.sourceBranch"] = repoResource.Properties.Get(RepositoryPropertyNames.Ref); + } + if (repoResource.Type == RepositoryTypes.Tfvc) + { + var shelveset = 
repoResource.Properties.Get(RepositoryPropertyNames.Shelveset); + if (!String.IsNullOrEmpty(shelveset) && (!message.Variables.ContainsKey("build.sourceTfvcShelveset") || String.IsNullOrEmpty(message.Variables["build.sourceTfvcShelveset"]?.Value))) + { + message.Variables["build.sourceTfvcShelveset"] = shelveset; + } + } + + TaskStep checkoutTask = message.Steps.FirstOrDefault(x => x.IsCheckoutTask()) as TaskStep; + if (checkoutTask != null) + { + if (checkoutTask.Inputs.TryGetValue(PipelineConstants.CheckoutTaskInputs.Clean, out string taskInputClean) && !string.IsNullOrEmpty(taskInputClean)) + { + legacyRepoEndpoint.Data["clean"] = taskInputClean; + } + else + { + legacyRepoEndpoint.Data["clean"] = Boolean.FalseString; + } + + if (checkoutTask.Inputs.TryGetValue(PipelineConstants.CheckoutTaskInputs.Submodules, out string taskInputSubmodules) && !string.IsNullOrEmpty(taskInputSubmodules)) + { + legacyRepoEndpoint.Data["checkoutSubmodules"] = Boolean.TrueString; + if (String.Equals(taskInputSubmodules, PipelineConstants.CheckoutTaskInputs.SubmodulesOptions.Recursive, StringComparison.OrdinalIgnoreCase)) + { + legacyRepoEndpoint.Data["checkoutNestedSubmodules"] = Boolean.TrueString; + } + } + + if (checkoutTask.Inputs.TryGetValue(PipelineConstants.CheckoutTaskInputs.FetchDepth, out string taskInputFetchDepth) && !string.IsNullOrEmpty(taskInputFetchDepth)) + { + legacyRepoEndpoint.Data["fetchDepth"] = taskInputFetchDepth; + } + + if (checkoutTask.Inputs.TryGetValue(PipelineConstants.CheckoutTaskInputs.Lfs, out string taskInputfs) && !string.IsNullOrEmpty(taskInputfs)) + { + legacyRepoEndpoint.Data["gitLfsSupport"] = taskInputfs; + } + + // Skip sync sources + if (String.Equals(checkoutTask.Inputs[PipelineConstants.CheckoutTaskInputs.Repository], PipelineConstants.NoneAlias, StringComparison.OrdinalIgnoreCase)) + { + legacyRepoEndpoint.Data["skipSyncSource"] = Boolean.TrueString; + } + else if (String.Equals(checkoutTask.Inputs[PipelineConstants.CheckoutTaskInputs.Repository], PipelineConstants.DesignerRepo, StringComparison.OrdinalIgnoreCase) && checkoutTask.Condition == Boolean.FalseString) + { + legacyRepoEndpoint.Data["skipSyncSource"] = Boolean.TrueString; + } + } + + // workspace clean options + legacyRepoEndpoint.Data["cleanOptions"] = "0"; // RepositoryCleanOptions.Source; + if (message.Workspace != null) + { + if (String.Equals(message.Workspace.Clean, PipelineConstants.WorkspaceCleanOptions.Outputs, StringComparison.OrdinalIgnoreCase)) + { + legacyRepoEndpoint.Data["cleanOptions"] = "1"; // RepositoryCleanOptions.SourceAndOutputDir; + } + else if (String.Equals(message.Workspace.Clean, PipelineConstants.WorkspaceCleanOptions.Resources, StringComparison.OrdinalIgnoreCase)) + { + legacyRepoEndpoint.Data["cleanOptions"] = "2"; //RepositoryCleanOptions.SourceDir; + } + else if (String.Equals(message.Workspace.Clean, PipelineConstants.WorkspaceCleanOptions.All, StringComparison.OrdinalIgnoreCase)) + { + legacyRepoEndpoint.Data["cleanOptions"] = "3"; // RepositoryCleanOptions.AllBuildDir; + } + } + + // add reposiotry endpoint to environment + message.Resources.Endpoints.Add(legacyRepoEndpoint); + } + } + + JobEnvironment environment = new JobEnvironment(message.Variables, message.MaskHints, message.Resources); + + WebApi.AgentJobRequestMessage legacyAgentRequestMessage = new WebApi.AgentJobRequestMessage(message.Plan, message.Timeline, message.JobId, message.JobDisplayName, message.JobName, environment, tasks) + { + RequestId = message.RequestId + }; + + return legacyAgentRequestMessage; 
+ } + + private static string ConvertLegacySourceType(string legacySourceType) + { + if (String.Equals(legacySourceType, LegacyRepositoryTypes.Bitbucket, StringComparison.OrdinalIgnoreCase)) + { + return RepositoryTypes.Bitbucket; + } + else if (String.Equals(legacySourceType, LegacyRepositoryTypes.Git, StringComparison.OrdinalIgnoreCase)) + { + return RepositoryTypes.ExternalGit; + } + else if (String.Equals(legacySourceType, LegacyRepositoryTypes.TfsGit, StringComparison.OrdinalIgnoreCase)) + { + return RepositoryTypes.Git; + } + else if (String.Equals(legacySourceType, LegacyRepositoryTypes.GitHub, StringComparison.OrdinalIgnoreCase)) + { + return RepositoryTypes.GitHub; + } + else if (String.Equals(legacySourceType, LegacyRepositoryTypes.GitHubEnterprise, StringComparison.OrdinalIgnoreCase)) + { + return RepositoryTypes.GitHubEnterprise; + } + else if (String.Equals(legacySourceType, LegacyRepositoryTypes.Svn, StringComparison.OrdinalIgnoreCase)) + { + return RepositoryTypes.Svn; + } + else if (String.Equals(legacySourceType, LegacyRepositoryTypes.TfsVersionControl, StringComparison.OrdinalIgnoreCase)) + { + return RepositoryTypes.Tfvc; + } + else + { + throw new NotSupportedException(legacySourceType); + } + } + + private static string ConvertToLegacySourceType(string pipelineSourceType) + { + if (String.Equals(pipelineSourceType, RepositoryTypes.Bitbucket, StringComparison.OrdinalIgnoreCase)) + { + return LegacyRepositoryTypes.Bitbucket; + } + else if (String.Equals(pipelineSourceType, RepositoryTypes.ExternalGit, StringComparison.OrdinalIgnoreCase)) + { + return LegacyRepositoryTypes.Git; + } + else if (String.Equals(pipelineSourceType, RepositoryTypes.Git, StringComparison.OrdinalIgnoreCase)) + { + return LegacyRepositoryTypes.TfsGit; + } + else if (String.Equals(pipelineSourceType, RepositoryTypes.GitHub, StringComparison.OrdinalIgnoreCase)) + { + return LegacyRepositoryTypes.GitHub; + } + else if (String.Equals(pipelineSourceType, RepositoryTypes.GitHubEnterprise, StringComparison.OrdinalIgnoreCase)) + { + return LegacyRepositoryTypes.GitHubEnterprise; + } + else if (String.Equals(pipelineSourceType, RepositoryTypes.Svn, StringComparison.OrdinalIgnoreCase)) + { + return LegacyRepositoryTypes.Svn; + } + else if (String.Equals(pipelineSourceType, RepositoryTypes.Tfvc, StringComparison.OrdinalIgnoreCase)) + { + return LegacyRepositoryTypes.TfsVersionControl; + } + else + { + throw new NotSupportedException(pipelineSourceType); + } + } + + private static class LegacyRepositoryTypes // Copy from Build.Webapi + { + public const String TfsVersionControl = "TfsVersionControl"; + public const String TfsGit = "TfsGit"; + public const String Git = "Git"; + public const String GitHub = "GitHub"; + public const String GitHubEnterprise = "GitHubEnterprise"; + public const String Bitbucket = "Bitbucket"; + public const String Svn = "Svn"; + } + + /// + /// Represents an entry in a workspace mapping. + /// + [DataContract] + private class LegacyMappingDetails + { + /// + /// The server path. + /// + [DataMember(Name = "serverPath")] + public String ServerPath + { + get; + set; + } + + /// + /// The mapping type. + /// + [DataMember(Name = "mappingType")] + public String MappingType + { + get; + set; + } + + /// + /// The local path. + /// + [DataMember(Name = "localPath")] + public String LocalPath + { + get; + set; + } + } + + /// + /// Represents a workspace mapping. + /// + [DataContract] + private class LegacyBuildWorkspace + { + /// + /// The list of workspace mapping entries. 
+ /// + public List Mappings + { + get + { + if (m_mappings == null) + { + m_mappings = new List(); + } + return m_mappings; + } + } + + [DataMember(Name = "mappings")] + private List m_mappings; + } + + /// + /// Represents a Subversion mapping entry. + /// + [DataContract] + private class LegacySvnMappingDetails + { + /// + /// The server path. + /// + [DataMember(Name = "serverPath")] + public String ServerPath + { + get; + set; + } + + /// + /// The local path. + /// + [DataMember(Name = "localPath")] + public String LocalPath + { + get; + set; + } + + /// + /// The revision. + /// + [DataMember(Name = "revision")] + public String Revision + { + get; + set; + } + + /// + /// The depth. + /// + [DataMember(Name = "depth")] + public Int32 Depth + { + get; + set; + } + + /// + /// Indicates whether to ignore externals. + /// + [DataMember(Name = "ignoreExternals")] + public bool IgnoreExternals + { + get; + set; + } + } + + /// + /// Represents a subversion workspace. + /// + [DataContract] + private class LegacySvnWorkspace + { + /// + /// The list of mappings. + /// + public List Mappings + { + get + { + if (m_Mappings == null) + { + m_Mappings = new List(); + } + return m_Mappings; + } + } + + [DataMember(Name = "mappings")] + private List m_Mappings; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/AgentPoolReference.cs b/src/Sdk/DTPipelines/Pipelines/AgentPoolReference.cs new file mode 100644 index 00000000000..9fd06febee9 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/AgentPoolReference.cs @@ -0,0 +1,38 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class AgentPoolReference : ResourceReference + { + public AgentPoolReference() + { + } + + private AgentPoolReference(AgentPoolReference referenceToCopy) + : base(referenceToCopy) + { + this.Id = referenceToCopy.Id; + } + + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + set; + } + + public AgentPoolReference Clone() + { + return new AgentPoolReference(this); + } + + public override String ToString() + { + return base.ToString() ?? this.Id.ToString(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/AgentPoolStore.cs b/src/Sdk/DTPipelines/Pipelines/AgentPoolStore.cs new file mode 100644 index 00000000000..e745c85ad02 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/AgentPoolStore.cs @@ -0,0 +1,116 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class AgentPoolStore : IAgentPoolStore + { + public AgentPoolStore( + IList pools, + IAgentPoolResolver resolver = null) + { + this.Resolver = resolver; + Add(pools?.ToArray()); + } + + /// + /// Get the queue resolver configured for this store. 
+ /// + public IAgentPoolResolver Resolver + { + get; + } + + public void Authorize(IList pools) + { + if (pools?.Count > 0) + { + foreach (var pool in pools) + { + var authorizedResource = this.Resolver?.Resolve(pool); + if (authorizedResource != null) + { + Add(authorizedResource); + } + } + } + } + + public IList GetAuthorizedReferences() + { + return m_resourcesById.Values.Select(x => new AgentPoolReference { Id = x.Id }).ToList(); + } + + public TaskAgentPool Get(AgentPoolReference reference) + { + if (reference == null) + { + return null; + } + + var referenceId = reference.Id; + var referenceName = reference.Name?.Literal; + if (reference.Id == 0 && String.IsNullOrEmpty(referenceName)) + { + return null; + } + + TaskAgentPool authorizedResource = null; + if (referenceId != 0) + { + if (m_resourcesById.TryGetValue(referenceId, out authorizedResource)) + { + return authorizedResource; + } + } + else if (!String.IsNullOrEmpty(referenceName)) + { + if (m_resourcesByName.TryGetValue(referenceName, out authorizedResource)) + { + return authorizedResource; + } + } + + // If we have an authorizer then attempt to authorize the reference for use + authorizedResource = this.Resolver?.Resolve(reference); + if (authorizedResource != null) + { + Add(authorizedResource); + } + + return authorizedResource; + } + + private void Add(params TaskAgentPool[] resources) + { + if (resources?.Length > 0) + { + foreach (var resource in resources) + { + // Track by ID + if (m_resourcesById.TryGetValue(resource.Id, out _)) + { + continue; + } + + m_resourcesById.Add(resource.Id, resource); + + // Track by name + if (m_resourcesByName.TryGetValue(resource.Name, out _)) + { + continue; + } + + m_resourcesByName.Add(resource.Name, resource); + } + } + } + + private readonly Dictionary m_resourcesById = new Dictionary(); + private readonly Dictionary m_resourcesByName = new Dictionary(StringComparer.OrdinalIgnoreCase); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/AgentPoolTarget.cs b/src/Sdk/DTPipelines/Pipelines/AgentPoolTarget.cs new file mode 100644 index 00000000000..6e1125b4e9a --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/AgentPoolTarget.cs @@ -0,0 +1,169 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.Pipelines.Validation; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class AgentPoolTarget : PhaseTarget + { + public AgentPoolTarget() + : base(PhaseTargetType.Pool) + { + } + + private AgentPoolTarget(AgentPoolTarget targetToClone) + : base(targetToClone) + { + this.Pool = targetToClone.Pool?.Clone(); + + + if (targetToClone.AgentSpecification != null) + { + this.AgentSpecification = new JObject(targetToClone.AgentSpecification); + } + + if (targetToClone.m_agentIds?.Count > 0) + { + this.m_agentIds = targetToClone.m_agentIds; + } + } + + /// + /// Gets or sets the target pool from which agents will be selected. + /// + [DataMember(EmitDefaultValue = false)] + public AgentPoolReference Pool + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public JObject AgentSpecification + { + get; + set; + } + + /// + /// Gets agent Ids filter on which deployment should be done. 
+ /// + public List AgentIds + { + get + { + if (m_agentIds == null) + { + m_agentIds = new List(); + } + return m_agentIds; + } + } + + public override PhaseTarget Clone() + { + return new AgentPoolTarget(this); + } + + public override Boolean IsValid(TaskDefinition task) + { + ArgumentUtility.CheckForNull(task, nameof(task)); + return task.RunsOn.Contains(TaskRunsOnConstants.RunsOnAgent, StringComparer.OrdinalIgnoreCase); + } + + internal override void Validate( + IPipelineContext context, + BuildOptions buildOptions, + ValidationResult result, + IList steps, + ISet taskDemands) + { + // validate pool + Int32 poolId = 0; + String poolName = null; + var pool = this.Pool; + if (pool != null) + { + poolId = pool.Id; + poolName = pool.Name?.GetValue(context)?.Value; + } + + if (poolId == 0 && String.IsNullOrEmpty(poolName) && buildOptions.ValidateResources) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotDefined())); + } + else + { + // we have a valid queue. record the reference + result.AddPoolReference(poolId, poolName); + + // Attempt to resolve the queue using any identifier specified. We will look up by either ID + // or name and the ID is preferred since it is immutable and more specific. + if (buildOptions.ValidateResources) + { + TaskAgentPool taskAgentPool = null; + var resourceStore = context.ResourceStore; + if (resourceStore != null) + { + if (poolId != 0) + { + taskAgentPool = resourceStore.GetPool(poolId); + if (taskAgentPool == null) + { + result.UnauthorizedResources.Pools.Add(new AgentPoolReference { Id = poolId }); + result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotFound(poolId))); + } + } + else if (!String.IsNullOrEmpty(poolName)) + { + taskAgentPool = resourceStore.GetPool(poolName); + if (taskAgentPool == null) + { + result.UnauthorizedResources.Pools.Add(new AgentPoolReference { Name = poolName }); + result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotFound(poolName))); + } + } + } + + // Store the resolved values inline to the resolved resource for this validation run + if (taskAgentPool != null) + { + this.Pool.Id = taskAgentPool.Id; + this.Pool.Name = taskAgentPool.Name; + } + } + } + } + + internal override JobExecutionContext CreateJobContext(PhaseExecutionContext context, string jobName, int attempt, bool continueOnError, int timeoutInMinutes, int cancelTimeoutInMinutes, IJobFactory jobFactory) + { + throw new NotSupportedException(nameof(AgentPoolTarget)); + } + + internal override ExpandPhaseResult Expand(PhaseExecutionContext context, bool continueOnError, int timeoutInMinutes, int cancelTimeoutInMinutes, IJobFactory jobFactory, JobExpansionOptions options) + { + throw new NotSupportedException(nameof(AgentPoolTarget)); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_agentIds?.Count == 0) + { + m_agentIds = null; + } + } + + [DataMember(Name = "AgentIds", EmitDefaultValue = false)] + private List m_agentIds; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/AgentQueueReference.cs b/src/Sdk/DTPipelines/Pipelines/AgentQueueReference.cs new file mode 100644 index 00000000000..9e80f0bcdcb --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/AgentQueueReference.cs @@ -0,0 +1,38 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class AgentQueueReference : ResourceReference + { + public 
AgentQueueReference() + { + } + + private AgentQueueReference(AgentQueueReference referenceToCopy) + : base(referenceToCopy) + { + this.Id = referenceToCopy.Id; + } + + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + set; + } + + public AgentQueueReference Clone() + { + return new AgentQueueReference(this); + } + + public override String ToString() + { + return base.ToString() ?? this.Id.ToString(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/AgentQueueStore.cs b/src/Sdk/DTPipelines/Pipelines/AgentQueueStore.cs new file mode 100644 index 00000000000..b6f8582e429 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/AgentQueueStore.cs @@ -0,0 +1,161 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class AgentQueueStore : IAgentQueueStore + { + public AgentQueueStore( + IList queues, + IAgentQueueResolver resolver = null) + { + this.Resolver = resolver; + Add(queues?.ToArray()); + } + + /// + /// Get the queue resolver configured for this store. + /// + public IAgentQueueResolver Resolver + { + get; + } + + public void Authorize(IList queues) + { + if (queues?.Count > 0) + { + foreach (var queue in queues) + { + Add(queue); + } + } + } + + public IList GetAuthorizedReferences() + { + return m_resourcesById.Values.Select(x => new AgentQueueReference { Id = x.Id }).ToList(); + } + + public TaskAgentQueue Get(AgentQueueReference reference) + { + if (reference == null) + { + return null; + } + + var referenceId = reference.Id; + var referenceName = reference.Name?.Literal; + if (reference.Id == 0 && String.IsNullOrEmpty(referenceName)) + { + return null; + } + + TaskAgentQueue authorizedResource = null; + if (referenceId != 0) + { + if (m_resourcesById.TryGetValue(referenceId, out authorizedResource)) + { + return authorizedResource; + } + } + else if (!String.IsNullOrEmpty(referenceName)) + { + if (m_resourcesByName.TryGetValue(referenceName, out List matchingResources)) + { + if (matchingResources.Count > 1) + { + throw new AmbiguousResourceSpecificationException(PipelineStrings.AmbiguousServiceEndpointSpecification(referenceId)); + } + + return matchingResources[0]; + } + } + + // If we have an authorizer then attempt to authorize the reference for use + authorizedResource = this.Resolver?.Resolve(reference); + if (authorizedResource != null) + { + Add(authorizedResource); + } + + return authorizedResource; + } + + private void Add(params TaskAgentQueue[] resources) + { + if (resources?.Length > 0) + { + foreach (var resource in resources) + { + // Track by ID + if (m_resourcesById.TryGetValue(resource.Id, out _)) + { + continue; + } + + m_resourcesById.Add(resource.Id, resource); + + // not all references have names + var name = resource.Name; + if (string.IsNullOrWhiteSpace(name)) + { + continue; + } + + // Track by name + if (!m_resourcesByName.TryGetValue(name, out var list)) + { + list = new List(); + m_resourcesByName.Add(name, list); + } + + // Clobber previously added alternate name, with the real hosted queue. + // For example, during the "Hosted macOS High Sierra" transition, until the real queue + // existed, it was treated as an alternate name for the "Hosted macOS" queue. After the + // real "Hosted macOS High Sierra" queue was created, it took priority. 
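+ // Restated: if the first queue already tracked under this name is hosted and the incoming
+ // queue is hosted as well, the incoming queue replaces it (the real queue wins over an
+ // alternate-name entry); otherwise the incoming queue is simply appended to the list.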
+ if (list.Count > 0 && list[0].Pool?.IsHosted == true && resource.Pool?.IsHosted == true) + { + list[0] = resource; + } + // Otherwise add the queue + else + { + list.Add(resource); + } + + // Track by alternate name for specific hosted pools. + // For example, "Hosted macOS Preview" and "Hosted macOS" are equivalent. + if (resource.Pool?.IsHosted == true && s_alternateNames.TryGetValue(name, out var alternateNames)) + { + foreach (var alternateName in alternateNames) + { + if (!m_resourcesByName.TryGetValue(alternateName, out list)) + { + list = new List(); + m_resourcesByName.Add(alternateName, list); + } + + if (list.Count == 0 || list[0].Pool?.IsHosted != true) + { + list.Add(resource); + } + } + } + } + } + } + + private static readonly Dictionary s_alternateNames = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + { "Hosted macOS", new[] { "Hosted macOS Preview" } }, + { "Hosted macOS Preview", new[] { "Hosted macOS" } }, + }; + private readonly Dictionary m_resourcesById = new Dictionary(); + private readonly Dictionary> m_resourcesByName = new Dictionary>(StringComparer.OrdinalIgnoreCase); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/AgentQueueTarget.cs b/src/Sdk/DTPipelines/Pipelines/AgentQueueTarget.cs new file mode 100644 index 00000000000..938009af401 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/AgentQueueTarget.cs @@ -0,0 +1,647 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.Pipelines.Validation; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides options for phase execution on an agent within a queue. + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class AgentQueueTarget : PhaseTarget + { + public AgentQueueTarget() + : base(PhaseTargetType.Queue) + { + } + + private AgentQueueTarget(AgentQueueTarget targetToClone) + : base(targetToClone) + { + this.Queue = targetToClone.Queue?.Clone(); + this.Execution = targetToClone.Execution?.Clone(); + + if (targetToClone.AgentSpecification != null) + { + this.AgentSpecification = new JObject(targetToClone.AgentSpecification); + } + + if (targetToClone.SidecarContainers?.Count > 0) + { + m_sidecarContainers = new Dictionary>(targetToClone.SidecarContainers, StringComparer.OrdinalIgnoreCase); + } + } + + /// + /// Gets or sets the target queue from which agents will be selected. + /// + [DataMember(EmitDefaultValue = false)] + [JsonConverter(typeof(QueueJsonConverter))] + public AgentQueueReference Queue + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public JObject AgentSpecification + { + get; + set; + } + + /// + /// Gets or sets parallel execution options which control expansion and execution of the phase. + /// + [DataMember(EmitDefaultValue = false)] + public ParallelExecutionOptions Execution + { + get; + set; + } + + /// + /// Gets or sets workspace options which control how agent manage the workspace of the phase. + /// + [DataMember(EmitDefaultValue = false)] + public WorkspaceOptions Workspace + { + get; + set; + } + + /// + /// Gets or sets the container the phase will be run in. 
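+ /// For example, a phase declared with "container: ubuntu:16.04" in YAML (an illustrative
+ /// image reference) would surface here, either as a literal value or as an unresolved expression.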
+ /// + [DataMember(EmitDefaultValue = false)] + [JsonConverter(typeof(ExpressionValueJsonConverter))] + public ExpressionValue Container + { + get; + set; + } + + /// + /// Gets the sidecar containers that will run alongside the phase. + /// + public IDictionary> SidecarContainers + { + get + { + if (m_sidecarContainers == null) + { + m_sidecarContainers = new Dictionary>(StringComparer.OrdinalIgnoreCase); + } + return m_sidecarContainers; + } + } + + public override PhaseTarget Clone() + { + return new AgentQueueTarget(this); + } + + public override Boolean IsValid(TaskDefinition task) + { + ArgumentUtility.CheckForNull(task, nameof(task)); + return task.RunsOn.Contains(TaskRunsOnConstants.RunsOnAgent, StringComparer.OrdinalIgnoreCase); + } + + /// + /// Creates a clone of this and attempts to resolve all expressions and macros. + /// + internal AgentQueueTarget Evaluate( + IPipelineContext context, + ValidationResult result) + { + var qname = String.Empty; + try + { + qname = context.ExpandVariables(this.Queue?.Name?.GetValue(context).Value); + } + catch (DistributedTask.Expressions.ExpressionException ee) + { + result.Errors.Add(new PipelineValidationError(ee.Message)); + return null; + } + + var literalTarget = this.Clone() as AgentQueueTarget; + + var spec = this.AgentSpecification; + if (spec != null) + { + spec = context.Evaluate(this.AgentSpecification).Value; + literalTarget.AgentSpecification = spec; + } + + // Note! The "vmImage" token of the agent spec is currently treated specially. + // This is a temporary relationship that allows vmImage agent specs to specify + // the hosted pool to use. + // It would be better to factor out this work into a separate, plug-in validator. + if (String.IsNullOrEmpty(qname) && spec != null) + { + const string VMImage = "vmImage"; // should be: YamlConstants.VMImage, which is inaccessible :( + spec.TryGetValue(VMImage, out var token); + if (token != null && token.Type == JTokenType.String) + { + var rawTokenValue = token.Value(); + var resolvedPoolName = PoolNameForVMImage(rawTokenValue); + if (resolvedPoolName == null) + { + result.Errors.Add(new PipelineValidationError($"Unexpected vmImage '{rawTokenValue}'")); + return null; + } + else + { + spec.Remove(VMImage); + literalTarget.Queue = new AgentQueueReference + { + Name = resolvedPoolName + }; + } + } + } + else + { + literalTarget.Queue.Name = qname; + } + + return literalTarget; + } + + /// + /// returns true for strings structured like expressions or macros. + /// they could techincally be literals though. 
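+ /// For example, "$(poolName)" (macro syntax) or an expression-shaped value would return true,
+ /// even though a queue could, in principle, be literally named that way. (Illustrative values.)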
+ /// + internal static Boolean IsProbablyExpressionOrMacro(String s) + { + return ExpressionValue.IsExpression(s) || VariableUtility.IsVariable(s); + } + + /// + /// returns true if this model is composed only of literal values (no expressions) + /// + internal Boolean IsLiteral() + { + var queue = this.Queue; + if (queue != null) + { + var queueName = queue.Name; + if (queueName != null) + { + if (!queueName.IsLiteral || VariableUtility.IsVariable(queueName.Literal)) + { + return false; + } + } + } + + var spec = this.AgentSpecification; + if (spec != null) + { + bool IsLiteral(JObject o) + { + foreach (var pair in o) + { + switch (pair.Value.Type) + { + case JTokenType.String: + if (IsProbablyExpressionOrMacro(pair.Value.Value())) + { + return false; + } + break; + case JTokenType.Object: + if (!IsLiteral(pair.Value.Value())) + { + return false; + } + break; + default: + break; + } + } + + return true; + } + + if (!IsLiteral(spec)) + { + return false; + } + } + + return true; + } + + /// + /// Temporary code to translate vmImage. Pool providers work will move this to a different layer + /// + /// + /// Hosted pool name + internal static String PoolNameForVMImage(String vmImageValue) + { + switch ((vmImageValue ?? String.Empty).ToUpperInvariant()) + { + case "UBUNTU 16.04": + case "UBUNTU-16.04": + case "UBUNTU LATEST": + case "UBUNTU-LATEST": + return "Hosted Ubuntu 1604"; + case "UBUNTU 18.04": + case "UBUNTU-18.04": + return "Hosted Ubuntu 1804"; + case "VISUAL STUDIO 2015 ON WINDOWS SERVER 2012R2": + case "VS2015-WIN2012R2": + return "Hosted"; + case "VISUAL STUDIO 2017 ON WINDOWS SERVER 2016": + case "VS2017-WIN2016": + return "Hosted VS2017"; + case "WINDOWS-2019-VS2019": + case "WINDOWS-2019": + case "WINDOWS LATEST": + case "WINDOWS-LATEST": + return "Hosted Windows 2019 with VS2019"; + case "WINDOWS SERVER 1803": + case "WIN1803": + return "Hosted Windows Container"; + case "MACOS 10.13": + case "MACOS-10.13": + case "XCODE 9 ON MACOS 10.13": + case "XCODE9-MACOS10.13": + case "XCODE 10 ON MACOS 10.13": + case "XCODE10-MACOS10.13": + return "Hosted macOS High Sierra"; + case "MACOS 10.14": + case "MACOS-10.14": + case "MACOS LATEST": + case "MACOS-LATEST": + return "Hosted macOS"; + default: + return null; + } + } + + /// + /// PipelineBuildContexts have build options. + /// GraphExecutionContexts have dependencies. + /// We might need either depending on the situation. + /// + private TaskAgentPoolReference ValidateQueue( + IPipelineContext context, + ValidationResult result, + BuildOptions buildOptions) + { + var queueId = 0; + var queueName = (String)null; + var queueNameIsUnresolvableExpression = false; // true iff Name is an expression, we're allowed to use them, and it has no current value + var queue = this.Queue; + if (queue != null) + { + queueId = queue.Id; + + // resolve name + var expressionValueName = queue.Name; + if (expressionValueName != null && (buildOptions.EnableResourceExpressions || expressionValueName.IsLiteral)) + { + // resolve expression + try + { + queueName = expressionValueName.GetValue(context).Value; + queueNameIsUnresolvableExpression = !expressionValueName.IsLiteral && String.IsNullOrEmpty(queueName); + } + catch (Exception ee) + { + // something bad happened trying to fetch the value. + // We do not really care what though. Just record the error and move on. 
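+ // (For instance, a malformed or unresolvable queue name expression would land in this handler;
+ // only its message may be passed along to the validation result below.)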
+ queueName = null; + + if (buildOptions.ValidateExpressions && buildOptions.ValidateResources) + { + result.Errors.Add(new PipelineValidationError(ee.Message)); + } + } + + // resolve name macro + if (buildOptions.EnableResourceExpressions && queueName != null && VariableUtility.IsVariable(queueName)) + { + queueName = context.ExpandVariables(queueName); + if (VariableUtility.IsVariable(queueName)) + { + // name appears to be a macro that is not defined. + queueNameIsUnresolvableExpression = true; + } + } + } + } + + if (queueNameIsUnresolvableExpression || (queueId == 0 && String.IsNullOrEmpty(queueName))) + { + // could not determine what queue user was talking about + if (!buildOptions.AllowEmptyQueueTarget && buildOptions.ValidateResources) + { + // expression-based queue names are allowed to be unresolved at compile time. + // TEMPORARY: literal queue names do not error at compile time if special keys exist + if (!queueNameIsUnresolvableExpression || buildOptions.ValidateExpressions) + { + if (!String.IsNullOrEmpty(queueName)) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotFoundByName(queueName))); + } + else + { + var expressionValueName = queue?.Name; + if (expressionValueName == null || expressionValueName.IsLiteral) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotDefined())); + } + else if (expressionValueName != null) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotFoundByName(expressionValueName.Expression))); + } + } + } + } + } + else + { + // we have a valid queue. record the reference + result.AddQueueReference(id: queueId, name: queueName); + + // Attempt to resolve the queue using any identifier specified. We will look up by either ID + // or name and the ID is preferred since it is immutable and more specific. 
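+ // For example, with Id = 12 and Name = "Hosted Ubuntu 1604" both supplied (illustrative values),
+ // the lookup below uses GetQueue(12); the name would only be consulted if the id were 0.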
+ if (buildOptions.ValidateResources) + { + TaskAgentQueue taskAgentQueue = null; + var resourceStore = context.ResourceStore; + if (resourceStore != null) + { + if (queueId != 0) + { + taskAgentQueue = resourceStore.GetQueue(queueId); + if (taskAgentQueue == null) + { + result.UnauthorizedResources.Queues.Add(new AgentQueueReference { Id = queueId }); + result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotFound(queueId))); + } + } + else if (!String.IsNullOrEmpty(queueName)) + { + taskAgentQueue = resourceStore.GetQueue(queueName); + if (taskAgentQueue == null) + { + result.UnauthorizedResources.Queues.Add(new AgentQueueReference { Name = queueName }); + result.Errors.Add(new PipelineValidationError(PipelineStrings.QueueNotFoundByName(queueName))); + } + } + } + + // Store the resolved values inline to the resolved resource for this validation run + if (taskAgentQueue != null) + { + this.Queue.Id = taskAgentQueue.Id; + return taskAgentQueue.Pool; + } + } + } + + return null; + } + + internal override void Validate( + IPipelineContext context, + BuildOptions buildOptions, + ValidationResult result, + IList steps, + ISet taskDemands) + { + // validate queue + var resolvedPool = ValidateQueue(context, result, buildOptions); + Boolean includeTaskDemands = resolvedPool == null || !resolvedPool.IsHosted; + + // Add advanced-checkout min agent demand + Boolean advancedCheckout = false; + int checkoutTasks = 0; + int injectedSystemTasks = 0; + bool countInjectSystemTasks = true; + for (int index = 0; index < steps.Count; index++) + { + var step = steps[index]; + // Task + if (step.Type == StepType.Task) + { + var task = step as TaskStep; + if (task.Name.StartsWith("__system_")) + { + if (countInjectSystemTasks) + { + injectedSystemTasks++; + } + } + else if (task.IsCheckoutTask()) + { + countInjectSystemTasks = false; + checkoutTasks++; + if (context.EnvironmentVersion < 2) + { + if (index > 0 && index - injectedSystemTasks > 0) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.CheckoutMustBeTheFirstStep())); + } + } + else + { + if (index > 0) + { + advancedCheckout = true; + } + } + + if (task.Inputs.TryGetValue(PipelineConstants.CheckoutTaskInputs.Repository, out String repository) && + !String.Equals(repository, PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase) && + !String.Equals(repository, PipelineConstants.NoneAlias, StringComparison.OrdinalIgnoreCase) && + !String.Equals(repository, PipelineConstants.DesignerRepo, StringComparison.OrdinalIgnoreCase)) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.CheckoutStepRepositoryNotSupported(task.Inputs[PipelineConstants.CheckoutTaskInputs.Repository]))); + } + } + else + { + countInjectSystemTasks = false; + } + } + } + + if (checkoutTasks > 1) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.CheckoutMultipleRepositoryNotSupported())); + } + + if (advancedCheckout) + { + taskDemands.Add(new DemandMinimumVersion(PipelineConstants.AgentVersionDemandName, PipelineConstants.AdvancedCheckoutMinAgentVersion)); + } + + // Now we need to ensure we have only a single demand for the mimimum agent version. We effectively remove + // every agent version demand we find and keep track of the one with the highest value. Assuming we located + // one or more of these demands we will ensure it is merged in at the end. 
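+ // For example, demands for agent versions 2.144.0 and 2.150.0 (illustrative values) should
+ // collapse into a single demand for 2.150.0.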
+ var minimumAgentVersionDemand = ResolveAgentVersionDemand(taskDemands); + minimumAgentVersionDemand = ResolveAgentVersionDemand(this.Demands, minimumAgentVersionDemand); + + // not include demands from task if phase is running inside container + // container suppose provide any required tool task needs + if (this.Container != null) + { + includeTaskDemands = false; + } + + // Merge the phase demands with the implicit demands from tasks. + if (includeTaskDemands && buildOptions.RollupStepDemands) + { + this.Demands.UnionWith(taskDemands); + } + + // If we resolved a minimum agent version demand then we go ahead and merge it in + // We want to do this even if targetting Hosted + if (minimumAgentVersionDemand != null) + { + this.Demands.Add(minimumAgentVersionDemand); + } + } + + private static DemandMinimumVersion ResolveAgentVersionDemand( + ISet demands, + DemandMinimumVersion currentMinimumVersion = null) + { + var minVersionDemand = DemandMinimumVersion.MaxAndRemove(demands); + if (minVersionDemand != null && (currentMinimumVersion == null || DemandMinimumVersion.CompareVersion(minVersionDemand.Value, currentMinimumVersion.Value) > 0)) + { + return minVersionDemand; + } + else + { + return currentMinimumVersion; + } + } + + internal override JobExecutionContext CreateJobContext( + PhaseExecutionContext context, + String jobName, + Int32 attempt, + Boolean continueOnError, + Int32 timeoutInMinutes, + Int32 cancelTimeoutInMinutes, + IJobFactory jobFactory) + { + context.Trace?.EnterProperty("CreateJobContext"); + var execution = this.Execution ?? new ParallelExecutionOptions(); + var jobContext = execution.CreateJobContext( + context, + jobName, + attempt, + this.Container, + this.SidecarContainers, + continueOnError, + timeoutInMinutes, + cancelTimeoutInMinutes, + jobFactory); + context.Trace?.LeaveProperty("CreateJobContext"); + + if (jobContext != null) + { + jobContext.Job.Definition.Workspace = this.Workspace?.Clone(); + } + + return jobContext; + } + + internal override ExpandPhaseResult Expand( + PhaseExecutionContext context, + Boolean continueOnError, + Int32 timeoutInMinutes, + Int32 cancelTimeoutInMinutes, + IJobFactory jobFactory, + JobExpansionOptions options) + { + context.Trace?.EnterProperty("Expand"); + var execution = this.Execution ?? new ParallelExecutionOptions(); + var result = execution.Expand( + context, + this.Container, + this.SidecarContainers, + continueOnError, + timeoutInMinutes, + cancelTimeoutInMinutes, + jobFactory, + options); + context.Trace?.LeaveProperty("Expand"); + + foreach (var job in result.Jobs) + { + job.Definition.Workspace = this.Workspace?.Clone(); + } + + return result; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_sidecarContainers?.Count == 0) + { + m_sidecarContainers = null; + } + } + + [DataMember(Name = "SidecarContainers", EmitDefaultValue = false)] + private IDictionary> m_sidecarContainers; + + /// + /// Ensures conversion of a TaskAgentQueue into an AgentQueueReference works properly when the serializer + /// is configured to write/honor type information. This is a temporary converter that may be removed after + /// M127 ships. 
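+ /// Deserialization loads the raw JSON object and populates a plain AgentQueueReference from it,
+ /// so only the members of the reference type (such as Id and Name) are carried over.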
+ /// + private sealed class QueueJsonConverter : VssSecureJsonConverter + { + public override Boolean CanWrite => false; + + public override Boolean CanConvert(Type objectType) + { + return objectType.Equals(typeof(AgentQueueReference)); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + var rawValue = JObject.Load(reader); + using (var objectReader = rawValue.CreateReader()) + { + var newValue = new AgentQueueReference(); + serializer.Populate(objectReader, newValue); + return newValue; + } + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Artifacts/ArtifactConstants.cs b/src/Sdk/DTPipelines/Pipelines/Artifacts/ArtifactConstants.cs new file mode 100644 index 00000000000..79025035972 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Artifacts/ArtifactConstants.cs @@ -0,0 +1,15 @@ +using System; + +namespace GitHub.DistributedTask.Pipelines.Artifacts +{ + public static class ArtifactConstants + { + internal static class ArtifactType + { + internal const String Build = nameof(Build); + internal const String Container = nameof(Container); + internal const String Package = nameof(Package); + internal const String SourceControl = nameof(SourceControl); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Artifacts/DownloadStepExtensions.cs b/src/Sdk/DTPipelines/Pipelines/Artifacts/DownloadStepExtensions.cs new file mode 100644 index 00000000000..192e71bd2ac --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Artifacts/DownloadStepExtensions.cs @@ -0,0 +1,150 @@ +using System; +using System.Collections.Generic; + +using GitHub.DistributedTask.Pipelines; +using GitHub.DistributedTask.Pipelines.Artifacts; +namespace GitHub.DistributedTask.Orchestration.Server.Artifacts +{ + public static class DownloadStepExtensions + { + public static Boolean IsDownloadBuildStepExists(this IReadOnlyList steps) + { + foreach (var step in steps) + { + if (step is TaskStep taskStep) + { + if (taskStep.IsDownloadBuildTask()) + { + return true; + } + } + } + + return false; + } + + public static Boolean IsDownloadBuildTask(this Step step) + { + if (step is TaskStep taskStep && + taskStep.Reference != null && + taskStep.Reference.Name.Equals(YamlArtifactConstants.DownloadBuild, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + return false; + } + + public static Boolean IsDownloadStepDisabled(this Step step) + { + // either download task or downloadBuild task has none keyword return true. 
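+ // For example, a YAML step written as "- download: none" (illustrative) shows up as a download
+ // task whose alias input is "none", which this helper treats as a disabled download step.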
+ if (step is TaskStep taskStep && + taskStep.Inputs.TryGetValue(PipelineArtifactConstants.DownloadTaskInputs.Alias, out String alias) && + String.Equals(alias, YamlArtifactConstants.None, StringComparison.OrdinalIgnoreCase) && + (step.IsDownloadBuildTask() || step.IsDownloadTask())) + { + return true; + } + + return false; + } + + public static Boolean IsDownloadTask(this Step step) + { + if (step is TaskStep taskStep && + taskStep.Reference != null && + taskStep.Reference.Id.Equals(PipelineArtifactConstants.DownloadTask.Id) && + taskStep.Reference.Version == PipelineArtifactConstants.DownloadTask.Version) + { + return true; + } + else + { + return false; + } + } + + public static Boolean IsDownloadCurrentPipelineArtifactStep(this Step step) + { + if (step is TaskStep taskStep && + taskStep.IsDownloadTask() && + taskStep.Inputs.TryGetValue(PipelineArtifactConstants.DownloadTaskInputs.Alias, out String alias) && + String.Equals(alias, YamlArtifactConstants.Current, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + return false; + } + + public static Boolean IsDownloadPipelineArtifactStepDisabled(this TaskStep step) + { + if (step.IsDownloadTask() && + step.Inputs.TryGetValue(PipelineArtifactConstants.DownloadTaskInputs.Alias, out String alias) && + String.Equals(alias, YamlArtifactConstants.None, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + return false; + } + + public static Boolean IsDownloadExternalPipelineArtifactStep(this TaskStep step) + { + if (step.IsDownloadTask() && + step.Inputs != null && + step.Inputs.TryGetValue(PipelineArtifactConstants.DownloadTaskInputs.Alias, out String alias) && + !String.IsNullOrEmpty(alias) && + !alias.Equals(YamlArtifactConstants.Current, StringComparison.OrdinalIgnoreCase) && + !alias.Equals(YamlArtifactConstants.None, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + return false; + } + + public static String GetAliasFromTaskStep(this TaskStep step) + { + return step.Inputs.TryGetValue(PipelineArtifactConstants.DownloadTaskInputs.Alias, out String alias) + ? alias + : String.Empty; + } + + public static Boolean IsDownloadPipelineArtifactStepExists(this IReadOnlyList steps) + { + foreach (var step in steps) + { + if (step is TaskStep taskStep) + { + if (taskStep.IsDownloadTask()) + { + return true; + } + } + } + + return false; + } + + public static void Merge( + this IDictionary first, + IDictionary second) + { + foreach (var key in second?.Keys ?? new List()) + { + first[key] = second[key]; + } + } + + public static void Merge( + this IDictionary first, + IReadOnlyDictionary second) + { + foreach (var key in second?.Keys ?? 
new List()) + { + first[key] = second[key]; + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Artifacts/IArtifactResolver.cs b/src/Sdk/DTPipelines/Pipelines/Artifacts/IArtifactResolver.cs new file mode 100644 index 00000000000..aee03a05d33 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Artifacts/IArtifactResolver.cs @@ -0,0 +1,49 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines.Artifacts +{ + /// + /// Provides a mechanism to resolve the artifacts + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IArtifactResolver + { + /// + /// Given a resource, it gets the corresponding task id from its extension + /// + /// + /// + Guid GetArtifactDownloadTaskId(Resource resource); + + /// + /// Given a resource and step, it maps the resource properties to task inputs + /// + /// + /// + void PopulateMappedTaskInputs(Resource resource, TaskStep taskStep); + + /// + /// Given an artifact step, it resolves the artifact and returns a download artifact task + /// + /// + /// + /// + Boolean ResolveStep(IPipelineContext pipelineContext, JobStep step, out IList resolvedSteps); + + /// + /// Given resource store and task step it translate the taskStep into actual task reference with mapped inputs + /// + /// + /// + /// + Boolean ResolveStep(IResourceStore resourceStore, TaskStep taskStep, out String errorMessage); + + /// + /// Validate the given resource in the YAML file. Also resolve version for the resource if not resolved already + /// + /// + Boolean ValidateDeclaredResource(Resource resource, out PipelineValidationError error); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Artifacts/PipelineArtifactConstants.cs b/src/Sdk/DTPipelines/Pipelines/Artifacts/PipelineArtifactConstants.cs new file mode 100644 index 00000000000..867756bdf1a --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Artifacts/PipelineArtifactConstants.cs @@ -0,0 +1,113 @@ +using System; + +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines.Artifacts +{ + public static class PipelineArtifactConstants + { + internal static class CommonArtifactTaskInputValues + { + internal const String DefaultDownloadPath = "$(Pipeline.Workspace)"; + internal const String DefaultDownloadPattern = "**"; + } + + public static class PipelineArtifactTaskInputs + { + public const String ArtifactName = "artifactName"; + + public const String BuildType = "buildType"; + + public const String BuildId = "buildId"; + + public const String BuildVersionToDownload = "buildVersionToDownload"; + + public const String Definition = "definition"; + + public const String DownloadType = "downloadType"; + + public const String DownloadPath = "downloadPath"; + + public const String FileSharePath = "fileSharePath"; + + public const String ItemPattern = "itemPattern"; + + public const String Project = "project"; + } + + public static class PipelineArtifactTaskInputValues + { + public const String DownloadTypeSingle = "single"; + public const String SpecificBuildType = "specific"; + public const String CurrentBuildType = "current"; + public const String AutomaticMode = "automatic"; + public const String ManualMode = "manual"; + } + + internal static class YamlConstants + { + internal const String Connection = "connection"; + internal const String Current = "current"; + internal const String None = "none"; + } + + public static class ArtifactTypes + { + public const string AzurePipelineArtifactType = "Pipeline"; + } + + public static 
class DownloadTaskInputs + { + public const String Alias = "alias"; + public const String Artifact = "artifact"; + public const String Mode = "mode"; + public const String Path = "path"; + public const String Patterns = "patterns"; + } + + public static class TraceConstants + { + public const String Area = "PipelineArtifacts"; + public const String DownloadPipelineArtifactFeature = "DownloadPipelineArtifact"; + } + + public static readonly TaskDefinition DownloadTask = new TaskDefinition + { + Id = new Guid("30f35852-3f7e-4c0c-9a88-e127b4f97211"), + Name = "Download", + FriendlyName = "Download Artifact", + Author = "Microsoft", + RunsOn = { TaskRunsOnConstants.RunsOnAgent }, + Version = new TaskVersion("1.0.0"), + Description = "Downloads pipeline type artifacts.", + HelpMarkDown = "[More Information](https://github.com)", + Inputs = { + new TaskInputDefinition() + { + Name = DownloadTaskInputs.Artifact, + Required = true, + InputType = TaskInputType.String + }, + new TaskInputDefinition() + { + Name = DownloadTaskInputs.Patterns, + Required = false, + DefaultValue = "**", + InputType = TaskInputType.String + }, + new TaskInputDefinition() + { + Name = DownloadTaskInputs.Path, + Required = false, + InputType = TaskInputType.String + }, + new TaskInputDefinition() + { + Name=DownloadTaskInputs.Alias, + Required = false, + InputType = TaskInputType.String + } + }, + }; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Artifacts/YamlArtifactConstants.cs b/src/Sdk/DTPipelines/Pipelines/Artifacts/YamlArtifactConstants.cs new file mode 100644 index 00000000000..5fe5bba9ab9 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Artifacts/YamlArtifactConstants.cs @@ -0,0 +1,16 @@ +using System; + +namespace GitHub.DistributedTask.Pipelines.Artifacts +{ + public static class YamlArtifactConstants + { + public const String Alias = "alias"; + public const String Connection = "connection"; + public const String Current = "current"; + public const String Download = "download"; + public const String DownloadBuild = "downloadBuild"; + public const String None = "none"; + public const String Path = "path"; + public const String Patterns = "patterns"; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/BuildOptions.cs b/src/Sdk/DTPipelines/Pipelines/BuildOptions.cs new file mode 100644 index 00000000000..c92128945d6 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/BuildOptions.cs @@ -0,0 +1,119 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechanism for controlling validation behaviors. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public class BuildOptions + { + public static BuildOptions None { get; } = new BuildOptions(); + + /// + /// Gets or sets a value indicating whether or not a queue target without a queue should be considered an + /// error. + /// + public Boolean AllowEmptyQueueTarget + { + get; + set; + } + + /// + /// Allow hyphens in names checked by the NameValidator. Used for yaml workflow schema + /// + public Boolean AllowHyphenNames + { + get; + set; + } + + /// + /// Gets or sets a value indicating whether to demand the latest agent version. + /// + public Boolean DemandLatestAgent + { + get; + set; + } + + /// + /// If true, resource definitions are allowed to use expressions + /// + public Boolean EnableResourceExpressions + { + get; + set; + } + + /// + /// Gets or sets a value indicating whether or not to resolve resource version. 
+ /// + public Boolean ResolveResourceVersions + { + get; + set; + } + + /// + /// Gets or sets a value indicating whether input aliases defined in a task definition are honored. + /// + public Boolean ResolveTaskInputAliases + { + get; + set; + } + + /// + /// Gets or sets a value indicating whether or not the individual step demands should be rolled up into their + /// parent phase's demands. Settings this value to true will result in Phase's demand sets being a superset + /// of their children's demands. + /// + public Boolean RollupStepDemands + { + get; + set; + } + + /// + /// If true, all expressions must be resolvable given a provided context. + /// This is normally going to be false for plan compile time and true for plan runtime. + /// + public Boolean ValidateExpressions + { + get; + set; + } + + /// + /// Gets or sets a value indicating whether or not to validate resource existence and other constraints. + /// + public Boolean ValidateResources + { + get; + set; + } + + /// + /// Gets or sets a value indicating whether or not step names provided by the caller should be validated for + /// correctness and uniqueness. Setting this value to false will automatically fix invalid step names and + /// de-duplicate step names which may lead to unexpected behavior at runtime when binding output variables. + /// + public Boolean ValidateStepNames + { + get; + set; + } + + /// + /// Gets or sets a value indicating whether or not to run input validation defined by the task author. + /// + public Boolean ValidateTaskInputs + { + get; + set; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/BuildResource.cs b/src/Sdk/DTPipelines/Pipelines/BuildResource.cs new file mode 100644 index 00000000000..b6e6a15a467 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/BuildResource.cs @@ -0,0 +1,68 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class BuildPropertyNames + { + public static readonly String Branch = "branch"; + public static readonly String Connection = "connection"; + public static readonly String Source = "source"; + public static readonly String Type = "type"; + public static readonly String Version = "version"; + } + + /// + /// Provides a data contract for a build resource referenced by a pipeline. + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class BuildResource : Resource + { + public BuildResource() + { + } + + protected BuildResource(BuildResource resourceToCopy) + : base(resourceToCopy) + { + } + + /// + /// Gets or sets the type of build resource. + /// + public String Type + { + get + { + return this.Properties.Get(BuildPropertyNames.Type); + } + set + { + this.Properties.Set(BuildPropertyNames.Type, value); + } + } + + /// + /// Gets or sets the version of the build resource. 
+ /// + public String Version + { + get + { + return this.Properties.Get(BuildPropertyNames.Version); + } + set + { + this.Properties.Set(BuildPropertyNames.Version, value); + } + } + + public BuildResource Clone() + { + return new BuildResource(this); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Checkpoints/CheckpointContext.cs b/src/Sdk/DTPipelines/Pipelines/Checkpoints/CheckpointContext.cs new file mode 100644 index 00000000000..c33bd72e134 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Checkpoints/CheckpointContext.cs @@ -0,0 +1,63 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.Pipelines.Checkpoints +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + [ClientIgnore] + public class CheckpointContext + { + /// + /// Unique id of the checkpoint, also used as the timeline record id + /// + [DataMember(IsRequired = true)] + public Guid Id { get; set; } + + /// + /// Auth token for querying DistributedTask + /// + [DataMember(IsRequired = true)] + public String Token { get; set; } + + /// + /// Checkpoint Instance Id + /// Use this for sending decision events and tracing telemetry. + /// + [DataMember(IsRequired = true)] + public String OrchestrationId { get; set; } + + /// + /// PlanId + /// + [DataMember(IsRequired = true)] + public Guid PlanId { get; set; } + + /// + /// Which TaskHub to use when sending decision events; + /// Use this for sending decision events. + /// + [DataMember(IsRequired = true)] + public String HubName { get; set; } + + /// + /// The project requesting decision. + /// + [DataMember(EmitDefaultValue = false)] + public CheckpointScope Project { get; set; } + + /// + /// The pipeline (definition) requesting decision. + /// + [DataMember(EmitDefaultValue = false)] + public PipelineScope Pipeline { get; set; } + + /// + /// The graph node requesting decision. 
+ /// + [DataMember(EmitDefaultValue = false)] + public GraphNodeScope GraphNode { get; set; } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Checkpoints/CheckpointDecision.cs b/src/Sdk/DTPipelines/Pipelines/Checkpoints/CheckpointDecision.cs new file mode 100644 index 00000000000..28bdbbdbf6f --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Checkpoints/CheckpointDecision.cs @@ -0,0 +1,36 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.Pipelines.Checkpoints +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + [ClientIgnore] + public class CheckpointDecision + { + /// + /// Checkpoint id, provided on context + /// + [DataMember(IsRequired = true)] + public Guid Id { get; set; } + + /// + /// Decision + /// + [DataMember(IsRequired = true)] + public String Result { get; set; } + + /// + /// Additional information (optional) + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Message { get; set; } + + // Decision possibilities + public const String Approved = "Approved"; + public const String Denied = "Denied"; + public const String Canceled = "Canceled"; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Checkpoints/CheckpointScope.cs b/src/Sdk/DTPipelines/Pipelines/Checkpoints/CheckpointScope.cs new file mode 100644 index 00000000000..bfcab3c7934 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Checkpoints/CheckpointScope.cs @@ -0,0 +1,56 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.Pipelines.Checkpoints +{ + /// + /// Provides context regarding the state of the orchestration. + /// Consumers may choose to use this information to cache decisions. + /// EG, if you wanted to return the same decision for this and all + /// future requests issuing from the same project / pipeline / stage / run + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + [ClientIgnore] + public class CheckpointScope + { + /// + /// May be used in uniquely identify this scope for future reference. + /// + [DataMember(IsRequired = true)] + public String Id { get; set; } + + /// + /// The friendly name of the scope + /// + [DataMember(EmitDefaultValue = false)] + public String Name { get; set; } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + [ClientIgnore] + public class GraphNodeScope : CheckpointScope + { + /// + /// Facilitates approving only a single attempt of a graph node in a specific run of a pipeline. 
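+ /// For example, an approver could record a decision for attempt 1 and still be asked again
+ /// if the node is retried as attempt 2 (illustrative scenario).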
+ /// + [DataMember(IsRequired = true)] + public Int32 Attempt { get; set; } = 1; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + [ClientIgnore] + public class PipelineScope : CheckpointScope + { + /// + /// Pipeline URLs + /// + [DataMember(IsRequired = true)] + public TaskOrchestrationOwner Owner { get; set; } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Checkpoints/ResourceInfo.cs b/src/Sdk/DTPipelines/Pipelines/Checkpoints/ResourceInfo.cs new file mode 100644 index 00000000000..66daffc0d91 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Checkpoints/ResourceInfo.cs @@ -0,0 +1,22 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.Pipelines.Checkpoints +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + [ClientIgnore] + public class ResourceInfo + { + [DataMember(EmitDefaultValue = false)] + public String Id { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String Name { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String TypeName { get; set; } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ConditionResult.cs b/src/Sdk/DTPipelines/Pipelines/ConditionResult.cs new file mode 100644 index 00000000000..1ca93e7576f --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ConditionResult.cs @@ -0,0 +1,25 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ConditionResult + { + [DataMember] + public Boolean Value + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Trace + { + get; + set; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContainerResource.cs b/src/Sdk/DTPipelines/Pipelines/ContainerResource.cs new file mode 100644 index 00000000000..113635cfc51 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContainerResource.cs @@ -0,0 +1,113 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class ContainerPropertyNames + { + public const String Env = "env"; + public const String Image = "image"; + public const String Options = "options"; + public const String Volumes = "volumes"; + public const String Ports = "ports"; + } + + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ContainerResource : Resource + { + [JsonConstructor] + public ContainerResource() + { + } + + private ContainerResource(ContainerResource referenceToCopy) + : base(referenceToCopy) + { + } + + /// + /// Gets or sets the environment which is provided to the container. + /// + public IDictionary Environment + { + get + { + return this.Properties.Get>(ContainerPropertyNames.Env); + } + set + { + this.Properties.Set(ContainerPropertyNames.Env, value); + } + } + + /// + /// Gets or sets the container image name. + /// + public String Image + { + get + { + return this.Properties.Get(ContainerPropertyNames.Image); + } + set + { + this.Properties.Set(ContainerPropertyNames.Image, value); + } + } + + /// + /// Gets or sets the options used for the container instance. 
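+ /// These are typically extra arguments handed to the container runtime when the container is
+ /// created, for example "--hostname build-host" (an illustrative value).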
+ /// + public String Options + { + get + { + return this.Properties.Get(ContainerPropertyNames.Options); + } + set + { + this.Properties.Set(ContainerPropertyNames.Options, value); + } + } + + /// + /// Gets or sets the volumes which are mounted into the container. + /// + public IList Volumes + { + get + { + return this.Properties.Get>(ContainerPropertyNames.Volumes); + } + set + { + this.Properties.Set(ContainerPropertyNames.Volumes, value); + } + } + + /// + /// Gets or sets the ports which are exposed on the container. + /// + public IList Ports + { + get + { + return this.Properties.Get>(ContainerPropertyNames.Ports); + } + set + { + this.Properties.Set(ContainerPropertyNames.Ports, value); + } + } + + public ContainerResource Clone() + { + return new ContainerResource(this); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextData/ArrayContextData.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/ArrayContextData.cs new file mode 100644 index 00000000000..f38f15634f3 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/ArrayContextData.cs @@ -0,0 +1,113 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ + [DataContract] + [JsonObject] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ArrayContextData : PipelineContextData, IEnumerable, IReadOnlyArray + { + public ArrayContextData() + : base(PipelineContextDataType.Array) + { + } + + [IgnoreDataMember] + public Int32 Count => m_items?.Count ?? 0; + + public PipelineContextData this[Int32 index] => m_items[index]; + + Object IReadOnlyArray.this[Int32 index] => m_items[index]; + + public void Add(PipelineContextData item) + { + if (m_items == null) + { + m_items = new List(); + } + + m_items.Add(item); + } + + public override PipelineContextData Clone() + { + var result = new ArrayContextData(); + if (m_items?.Count > 0) + { + result.m_items = new List(m_items.Count); + foreach (var item in m_items) + { + result.m_items.Add(item); + } + } + return result; + } + + public override JToken ToJToken() + { + var result = new JArray(); + if (m_items?.Count > 0) + { + foreach (var item in m_items) + { + result.Add(item?.ToJToken() ?? 
JValue.CreateNull()); + } + } + return result; + } + + public IEnumerator GetEnumerator() + { + if (m_items?.Count > 0) + { + foreach (var item in m_items) + { + yield return item; + } + } + } + + IEnumerator IEnumerable.GetEnumerator() + { + if (m_items?.Count > 0) + { + foreach (var item in m_items) + { + yield return item; + } + } + } + + IEnumerator IReadOnlyArray.GetEnumerator() + { + if (m_items?.Count > 0) + { + foreach (var item in m_items) + { + yield return item; + } + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_items?.Count == 0) + { + m_items = null; + } + } + + [DataMember(Name = "a", EmitDefaultValue = false)] + private List m_items; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextData/BooleanContextData.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/BooleanContextData.cs new file mode 100644 index 00000000000..073b098bb57 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/BooleanContextData.cs @@ -0,0 +1,62 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class BooleanContextData : PipelineContextData, IBoolean + { + public BooleanContextData(Boolean value) + : base(PipelineContextDataType.Boolean) + { + m_value = value; + } + + public Boolean Value + { + get + { + return m_value; + } + } + + public override PipelineContextData Clone() + { + return new BooleanContextData(m_value); + } + + public override JToken ToJToken() + { + return (JToken)m_value; + } + + public override String ToString() + { + return m_value ? "true" : "false"; + } + + Boolean IBoolean.GetBoolean() + { + return Value; + } + + public static implicit operator Boolean(BooleanContextData data) + { + return data.Value; + } + + public static implicit operator BooleanContextData(Boolean data) + { + return new BooleanContextData(data); + } + + [DataMember(Name = "b", EmitDefaultValue = false)] + private Boolean m_value; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextData/CaseSensitiveDictionaryContextData.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/CaseSensitiveDictionaryContextData.cs new file mode 100644 index 00000000000..a1fa19ec71e --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/CaseSensitiveDictionaryContextData.cs @@ -0,0 +1,293 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ + [DataContract] + [JsonObject] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public class CaseSensitiveDictionaryContextData : PipelineContextData, IEnumerable>, IReadOnlyObject + { + public CaseSensitiveDictionaryContextData() + : base(PipelineContextDataType.CaseSensitiveDictionary) + { + } + + [IgnoreDataMember] + public Int32 Count => m_list?.Count ?? 
0; + + [IgnoreDataMember] + public IEnumerable Keys + { + get + { + if (m_list?.Count > 0) + { + foreach (var pair in m_list) + { + yield return pair.Key; + } + } + } + } + + [IgnoreDataMember] + public IEnumerable Values + { + get + { + if (m_list?.Count > 0) + { + foreach (var pair in m_list) + { + yield return pair.Value; + } + } + } + } + + IEnumerable IReadOnlyObject.Values + { + get + { + if (m_list?.Count > 0) + { + foreach (var pair in m_list) + { + yield return pair.Value; + } + } + } + } + + private Dictionary IndexLookup + { + get + { + if (m_indexLookup == null) + { + m_indexLookup = new Dictionary(StringComparer.Ordinal); + if (m_list?.Count > 0) + { + for (var i = 0; i < m_list.Count; i++) + { + var pair = m_list[i]; + m_indexLookup.Add(pair.Key, i); + } + } + } + + return m_indexLookup; + } + } + + private List List + { + get + { + if (m_list == null) + { + m_list = new List(); + } + + return m_list; + } + } + + public PipelineContextData this[String key] + { + get + { + var index = IndexLookup[key]; + return m_list[index].Value; + } + + set + { + // Existing + if (IndexLookup.TryGetValue(key, out var index)) + { + key = m_list[index].Key; // preserve casing + m_list[index] = new DictionaryContextDataPair(key, value); + } + // New + else + { + Add(key, value); + } + } + } + + Object IReadOnlyObject.this[String key] + { + get + { + var index = IndexLookup[key]; + return m_list[index].Value; + } + } + + internal KeyValuePair this[Int32 index] + { + get + { + var pair = m_list[index]; + return new KeyValuePair(pair.Key, pair.Value); + } + } + + public void Add(IEnumerable> pairs) + { + foreach (var pair in pairs) + { + Add(pair.Key, pair.Value); + } + } + + public void Add( + String key, + PipelineContextData value) + { + IndexLookup.Add(key, m_list?.Count ?? 0); + List.Add(new DictionaryContextDataPair(key, value)); + } + + public override PipelineContextData Clone() + { + var result = new CaseSensitiveDictionaryContextData(); + + if (m_list?.Count > 0) + { + result.m_list = new List(m_list.Count); + foreach (var item in m_list) + { + result.m_list.Add(new DictionaryContextDataPair(item.Key, item.Value?.Clone())); + } + } + + return result; + } + + public override JToken ToJToken() + { + var json = new JObject(); + if (m_list?.Count > 0) + { + foreach (var item in m_list) + { + json.Add(item.Key, item.Value?.ToJToken() ?? 
JValue.CreateNull()); + } + } + return json; + } + + public Boolean ContainsKey(String key) + { + return TryGetValue(key, out _); + } + + public IEnumerator> GetEnumerator() + { + if (m_list?.Count > 0) + { + foreach (var pair in m_list) + { + yield return new KeyValuePair(pair.Key, pair.Value); + } + } + } + + IEnumerator IEnumerable.GetEnumerator() + { + if (m_list?.Count > 0) + { + foreach (var pair in m_list) + { + yield return new KeyValuePair(pair.Key, pair.Value); + } + } + } + + IEnumerator IReadOnlyObject.GetEnumerator() + { + if (m_list?.Count > 0) + { + foreach (var pair in m_list) + { + yield return new KeyValuePair(pair.Key, pair.Value); + } + } + } + + public Boolean TryGetValue( + String key, + out PipelineContextData value) + { + if (m_list?.Count > 0 && + IndexLookup.TryGetValue(key, out var index)) + { + value = m_list[index].Value; + return true; + } + + value = null; + return false; + } + + Boolean IReadOnlyObject.TryGetValue( + String key, + out Object value) + { + if (TryGetValue(key, out PipelineContextData data)) + { + value = data; + return true; + } + + value = null; + return false; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_list?.Count == 0) + { + m_list = null; + } + } + + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + private sealed class DictionaryContextDataPair + { + public DictionaryContextDataPair( + String key, + PipelineContextData value) + { + Key = key; + Value = value; + } + + [DataMember(Name = "k")] + public readonly String Key; + + [DataMember(Name = "v")] + public readonly PipelineContextData Value; + } + + private Dictionary m_indexLookup; + + [DataMember(Name = "d", EmitDefaultValue = false)] + private List m_list; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextData/DictionaryContextData.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/DictionaryContextData.cs new file mode 100644 index 00000000000..a766c66bc67 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/DictionaryContextData.cs @@ -0,0 +1,293 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ + [DataContract] + [JsonObject] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public class DictionaryContextData : PipelineContextData, IEnumerable>, IReadOnlyObject + { + public DictionaryContextData() + : base(PipelineContextDataType.Dictionary) + { + } + + [IgnoreDataMember] + public Int32 Count => m_list?.Count ?? 
0; + + [IgnoreDataMember] + public IEnumerable Keys + { + get + { + if (m_list?.Count > 0) + { + foreach (var pair in m_list) + { + yield return pair.Key; + } + } + } + } + + [IgnoreDataMember] + public IEnumerable Values + { + get + { + if (m_list?.Count > 0) + { + foreach (var pair in m_list) + { + yield return pair.Value; + } + } + } + } + + IEnumerable IReadOnlyObject.Values + { + get + { + if (m_list?.Count > 0) + { + foreach (var pair in m_list) + { + yield return pair.Value; + } + } + } + } + + private Dictionary IndexLookup + { + get + { + if (m_indexLookup == null) + { + m_indexLookup = new Dictionary(StringComparer.OrdinalIgnoreCase); + if (m_list?.Count > 0) + { + for (var i = 0; i < m_list.Count; i++) + { + var pair = m_list[i]; + m_indexLookup.Add(pair.Key, i); + } + } + } + + return m_indexLookup; + } + } + + private List List + { + get + { + if (m_list == null) + { + m_list = new List(); + } + + return m_list; + } + } + + public PipelineContextData this[String key] + { + get + { + var index = IndexLookup[key]; + return m_list[index].Value; + } + + set + { + // Existing + if (IndexLookup.TryGetValue(key, out var index)) + { + key = m_list[index].Key; // preserve casing + m_list[index] = new DictionaryContextDataPair(key, value); + } + // New + else + { + Add(key, value); + } + } + } + + Object IReadOnlyObject.this[String key] + { + get + { + var index = IndexLookup[key]; + return m_list[index].Value; + } + } + + internal KeyValuePair this[Int32 index] + { + get + { + var pair = m_list[index]; + return new KeyValuePair(pair.Key, pair.Value); + } + } + + public void Add(IEnumerable> pairs) + { + foreach (var pair in pairs) + { + Add(pair.Key, pair.Value); + } + } + + public void Add( + String key, + PipelineContextData value) + { + IndexLookup.Add(key, m_list?.Count ?? 0); + List.Add(new DictionaryContextDataPair(key, value)); + } + + public override PipelineContextData Clone() + { + var result = new DictionaryContextData(); + + if (m_list?.Count > 0) + { + result.m_list = new List(m_list.Count); + foreach (var item in m_list) + { + result.m_list.Add(new DictionaryContextDataPair(item.Key, item.Value?.Clone())); + } + } + + return result; + } + + public override JToken ToJToken() + { + var json = new JObject(); + if (m_list?.Count > 0) + { + foreach (var item in m_list) + { + json.Add(item.Key, item.Value?.ToJToken() ?? 
JValue.CreateNull()); + } + } + return json; + } + + public Boolean ContainsKey(String key) + { + return TryGetValue(key, out _); + } + + public IEnumerator> GetEnumerator() + { + if (m_list?.Count > 0) + { + foreach (var pair in m_list) + { + yield return new KeyValuePair(pair.Key, pair.Value); + } + } + } + + IEnumerator IEnumerable.GetEnumerator() + { + if (m_list?.Count > 0) + { + foreach (var pair in m_list) + { + yield return new KeyValuePair(pair.Key, pair.Value); + } + } + } + + IEnumerator IReadOnlyObject.GetEnumerator() + { + if (m_list?.Count > 0) + { + foreach (var pair in m_list) + { + yield return new KeyValuePair(pair.Key, pair.Value); + } + } + } + + public Boolean TryGetValue( + String key, + out PipelineContextData value) + { + if (m_list?.Count > 0 && + IndexLookup.TryGetValue(key, out var index)) + { + value = m_list[index].Value; + return true; + } + + value = null; + return false; + } + + Boolean IReadOnlyObject.TryGetValue( + String key, + out Object value) + { + if (TryGetValue(key, out PipelineContextData data)) + { + value = data; + return true; + } + + value = null; + return false; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_list?.Count == 0) + { + m_list = null; + } + } + + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + private sealed class DictionaryContextDataPair + { + public DictionaryContextDataPair( + String key, + PipelineContextData value) + { + Key = key; + Value = value; + } + + [DataMember(Name = "k")] + public readonly String Key; + + [DataMember(Name = "v")] + public readonly PipelineContextData Value; + } + + private Dictionary m_indexLookup; + + [DataMember(Name = "d", EmitDefaultValue = false)] + private List m_list; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextData/JTokenExtensions.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/JTokenExtensions.cs new file mode 100644 index 00000000000..4d83a44983d --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/JTokenExtensions.cs @@ -0,0 +1,64 @@ +using System; +using System.ComponentModel; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class JTokenExtensions + { + public static PipelineContextData ToPipelineContextData(this JToken value) + { + return value.ToPipelineContextData(1, 100); + } + + public static PipelineContextData ToPipelineContextData( + this JToken value, + Int32 depth, + Int32 maxDepth) + { + if (depth < maxDepth) + { + if (value.Type == JTokenType.String) + { + return new StringContextData((String)value); + } + else if (value.Type == JTokenType.Boolean) + { + return new BooleanContextData((Boolean)value); + } + else if (value.Type == JTokenType.Float || value.Type == JTokenType.Integer) + { + return new NumberContextData((Double)value); + } + else if (value.Type == JTokenType.Object) + { + var subContext = new DictionaryContextData(); + var obj = (JObject)value; + foreach (var property in obj.Properties()) + { + subContext[property.Name] = ToPipelineContextData(property.Value, depth + 1, maxDepth); + } + return subContext; + } + else if (value.Type == JTokenType.Array) + { + var arrayContext = new ArrayContextData(); + var arr = (JArray)value; + foreach (var element in arr) + { + arrayContext.Add(ToPipelineContextData(element, depth + 1, maxDepth)); + } + return arrayContext; + } + else if (value.Type == JTokenType.Null) + { + return null; + } + } + + // We don't understand the 
type or have reached our max, return as string + return new StringContextData(value.ToString()); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextData/NumberContextData.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/NumberContextData.cs new file mode 100644 index 00000000000..07d2172bcdb --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/NumberContextData.cs @@ -0,0 +1,77 @@ +using System; +using System.ComponentModel; +using System.Globalization; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class NumberContextData : PipelineContextData, INumber + { + public NumberContextData(Double value) + : base(PipelineContextDataType.Number) + { + m_value = value; + } + + public Double Value + { + get + { + return m_value; + } + } + + public override PipelineContextData Clone() + { + return new NumberContextData(m_value); + } + + public override JToken ToJToken() + { + if (Double.IsNaN(m_value) || m_value == Double.PositiveInfinity || m_value == Double.NegativeInfinity) + { + return (JToken)m_value; + } + + var floored = Math.Floor(m_value); + if (m_value == floored && m_value <= (Double)Int32.MaxValue && m_value >= (Double)Int32.MinValue) + { + Int32 flooredInt = (Int32)floored; + return (JToken)flooredInt; + } + else + { + return (JToken)m_value; + } + } + + public override String ToString() + { + return m_value.ToString("G15", CultureInfo.InvariantCulture); + } + + Double INumber.GetNumber() + { + return Value; + } + + public static implicit operator Double(NumberContextData data) + { + return data.Value; + } + + public static implicit operator NumberContextData(Double data) + { + return new NumberContextData(data); + } + + [DataMember(Name = "n", EmitDefaultValue = false)] + private Double m_value; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextData.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextData.cs new file mode 100644 index 00000000000..7b2552c6198 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextData.cs @@ -0,0 +1,31 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ + /// + /// Base class for all template tokens + /// + [DataContract] + [JsonConverter(typeof(PipelineContextDataJsonConverter))] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class PipelineContextData + { + protected PipelineContextData(Int32 type) + { + Type = type; + } + + [DataMember(Name = "t", EmitDefaultValue = false)] + internal Int32 Type { get; } + + public abstract PipelineContextData Clone(); + + public abstract JToken ToJToken(); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextDataExtensions.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextDataExtensions.cs new file mode 100644 index 00000000000..164d57cb69b --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextDataExtensions.cs @@ -0,0 +1,290 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using Newtonsoft.Json.Linq; + +namespace 
GitHub.DistributedTask.Pipelines.ContextData +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class PipelineContextDataExtensions + { + [EditorBrowsable(EditorBrowsableState.Never)] + public static ArrayContextData AssertArray( + this PipelineContextData value, + String objectDescription) + { + if (value is ArrayContextData array) + { + return array; + } + + throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(ArrayContextData)}' was expected."); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static DictionaryContextData AssertDictionary( + this PipelineContextData value, + String objectDescription) + { + if (value is DictionaryContextData dictionary) + { + return dictionary; + } + + throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(DictionaryContextData)}' was expected."); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static StringContextData AssertString( + this PipelineContextData value, + String objectDescription) + { + if (value is StringContextData str) + { + return str; + } + + throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(StringContextData)}' was expected."); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static BooleanContextData AssertBoolean( + this PipelineContextData value, + String objectDescription) + { + if (value is BooleanContextData boolValue) + { + return boolValue; + } + + throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(BooleanContextData)}' was expected."); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static NumberContextData AssertNumber( + this PipelineContextData value, + String objectDescription) + { + if (value is NumberContextData num) + { + return num; + } + + throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(NumberContextData)}' was expected."); + } + + /// + /// Returns all context data objects (depth first) + /// + internal static IEnumerable Traverse(this PipelineContextData value) + { + return Traverse(value, omitKeys: false); + } + + /// + /// Returns all context data objects (depth first) + /// + internal static IEnumerable Traverse( + this PipelineContextData value, + Boolean omitKeys) + { + yield return value; + + if (value is ArrayContextData || value is DictionaryContextData) + { + var state = new TraversalState(null, value); + while (state != null) + { + if (state.MoveNext(omitKeys)) + { + value = state.Current; + yield return value; + + if (value is ArrayContextData || value is DictionaryContextData) + { + state = new TraversalState(state, value); + } + } + else + { + state = state.Parent; + } + } + } + } + + internal static JToken ToJToken(this PipelineContextData value) + { + JToken result; + + if (value is StringContextData str) + { + result = str.Value ?? 
String.Empty; + } + else if (value is BooleanContextData booleanValue) + { + result = booleanValue.Value; + } + else if (value is NumberContextData num) + { + result = num.Value; + } + else if (value is ArrayContextData array) + { + var jarray = new JArray(); + + foreach (var item in array) + { + jarray.Add(item.ToJToken()); // Recurse + } + + result = jarray; + } + else if (value is DictionaryContextData dictionary) + { + var jobject = new JObject(); + + foreach (var pair in dictionary) + { + var key = pair.Key ?? String.Empty; + var value2 = pair.Value.ToJToken(); // Recurse + + if (value2 != null) + { + jobject[key] = value2; + } + } + + result = jobject; + } + else + { + throw new InvalidOperationException("Internal error reading the template. Expected a string, an array, or a dictionary"); + } + + return result; + } + + internal static TemplateToken ToTemplateToken(this PipelineContextData data) + { + if (data is null) + { + return new NullToken(null, null, null); + } + + switch (data.Type) + { + case PipelineContextDataType.Dictionary: + var dictionary = data.AssertDictionary("dictionary"); + var mapping = new MappingToken(null, null, null); + if (dictionary.Count > 0) + { + foreach (var pair in dictionary) + { + var key = new StringToken(null, null, null, pair.Key); + var value = pair.Value.ToTemplateToken(); + mapping.Add(key, value); + } + } + return mapping; + + case PipelineContextDataType.Array: + var array = data.AssertArray("array"); + var sequence = new SequenceToken(null, null, null); + if (array.Count > 0) + { + foreach (var item in array) + { + sequence.Add(item.ToTemplateToken()); + } + } + return sequence; + + case PipelineContextDataType.String: + var stringData = data as StringContextData; + return new StringToken(null, null, null, stringData.Value); + + case PipelineContextDataType.Boolean: + var booleanData = data as BooleanContextData; + return new BooleanToken(null, null, null, booleanData.Value); + + case PipelineContextDataType.Number: + var numberData = data as NumberContextData; + return new NumberToken(null, null, null, numberData.Value); + + default: + throw new NotSupportedException($"Unexpected {nameof(PipelineContextDataType)} type '{data.Type}'"); + } + } + + private sealed class TraversalState + { + public TraversalState( + TraversalState parent, + PipelineContextData data) + { + Parent = parent; + m_data = data; + } + + public Boolean MoveNext(Boolean omitKeys) + { + switch (m_data.Type) + { + case PipelineContextDataType.Array: + var array = m_data.AssertArray("array"); + if (++m_index < array.Count) + { + Current = array[m_index]; + return true; + } + else + { + Current = null; + return false; + } + + case PipelineContextDataType.Dictionary: + var dictionary = m_data.AssertDictionary("dictionary"); + + // Return the value + if (m_isKey) + { + m_isKey = false; + Current = dictionary[m_index].Value; + return true; + } + + if (++m_index < dictionary.Count) + { + // Skip the key, return the value + if (omitKeys) + { + m_isKey = false; + Current = dictionary[m_index].Value; + return true; + } + + // Return the key + m_isKey = true; + Current = new StringContextData(dictionary[m_index].Key); + return true; + } + + Current = null; + return false; + + default: + throw new NotSupportedException($"Unexpected {nameof(PipelineContextData)} type '{m_data.Type}'"); + } + } + + private PipelineContextData m_data; + private Int32 m_index = -1; + private Boolean m_isKey; + public PipelineContextData Current; + public TraversalState Parent; + } + } +} diff --git 
a/src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextDataJsonConverter.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextDataJsonConverter.cs new file mode 100644 index 00000000000..ce861953503 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextDataJsonConverter.cs @@ -0,0 +1,200 @@ +using System; +using System.Reflection; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ + /// + /// JSON serializer for ContextData objects + /// + internal sealed class PipelineContextDataJsonConverter : VssSecureJsonConverter + { + public override Boolean CanWrite + { + get + { + return true; + } + } + + public override Boolean CanConvert(Type objectType) + { + return typeof(PipelineContextData).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + switch (reader.TokenType) + { + case JsonToken.String: + return new StringContextData(reader.Value.ToString()); + + case JsonToken.Boolean: + return new BooleanContextData((Boolean)reader.Value); + + case JsonToken.Float: + return new NumberContextData((Double)reader.Value); + + case JsonToken.Integer: + return new NumberContextData((Double)(Int64)reader.Value); + + case JsonToken.StartObject: + break; + + default: + return null; + } + + Int32? type = null; + JObject value = JObject.Load(reader); + if (!value.TryGetValue("t", StringComparison.OrdinalIgnoreCase, out JToken typeValue)) + { + type = PipelineContextDataType.String; + } + else if (typeValue.Type == JTokenType.Integer) + { + type = (Int32)typeValue; + } + else + { + return existingValue; + } + + Object newValue = null; + switch (type) + { + case PipelineContextDataType.String: + newValue = new StringContextData(null); + break; + + case PipelineContextDataType.Array: + newValue = new ArrayContextData(); + break; + + case PipelineContextDataType.Dictionary: + newValue = new DictionaryContextData(); + break; + + case PipelineContextDataType.Boolean: + newValue = new BooleanContextData(false); + break; + + case PipelineContextDataType.Number: + newValue = new NumberContextData(0); + break; + + case PipelineContextDataType.CaseSensitiveDictionary: + newValue = new CaseSensitiveDictionaryContextData(); + break; + + default: + throw new NotSupportedException($"Unexpected {nameof(PipelineContextDataType)} '{type}'"); + } + + if (value != null) + { + using (JsonReader objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, newValue); + } + } + + return newValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + base.WriteJson(writer, value, serializer); + if (Object.ReferenceEquals(value, null)) + { + writer.WriteNull(); + } + else if (value is StringContextData stringData) + { + writer.WriteValue(stringData.Value); + } + else if (value is BooleanContextData boolData) + { + writer.WriteValue(boolData.Value); + } + else if (value is NumberContextData numberData) + { + writer.WriteValue(numberData.Value); + } + else if (value is ArrayContextData arrayData) + { + writer.WriteStartObject(); + writer.WritePropertyName("t"); + writer.WriteValue(PipelineContextDataType.Array); + if (arrayData.Count > 0) + { + writer.WritePropertyName("a"); + writer.WriteStartArray(); + foreach (var item in arrayData) + { + serializer.Serialize(writer, item); + } + 
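+                // Editorial note (not part of the original change): a rough sketch of the
+                // wire format produced by this converter, based on the WriteJson/ReadJson
+                // branches here. Strings, booleans and numbers are written as plain JSON
+                // values; an array is written as {"t":1,"a":[...]} and a dictionary as
+                // {"t":2,"d":[{"k":"<key>","v":<value>}, ...]}. On the read side, a bare
+                // JSON object without a "t" property is treated as the String type.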
writer.WriteEndArray(); + } + writer.WriteEndObject(); + } + else if (value is DictionaryContextData dictionaryData) + { + writer.WriteStartObject(); + writer.WritePropertyName("t"); + writer.WriteValue(PipelineContextDataType.Dictionary); + if (dictionaryData.Count > 0) + { + writer.WritePropertyName("d"); + writer.WriteStartArray(); + foreach (var pair in dictionaryData) + { + writer.WriteStartObject(); + writer.WritePropertyName("k"); + writer.WriteValue(pair.Key); + writer.WritePropertyName("v"); + serializer.Serialize(writer, pair.Value); + writer.WriteEndObject(); + } + writer.WriteEndArray(); + } + writer.WriteEndObject(); + } + else if (value is CaseSensitiveDictionaryContextData caseSensitiveDictionaryData) + { + writer.WriteStartObject(); + writer.WritePropertyName("t"); + writer.WriteValue(PipelineContextDataType.CaseSensitiveDictionary); + if (caseSensitiveDictionaryData.Count > 0) + { + writer.WritePropertyName("d"); + writer.WriteStartArray(); + foreach (var pair in caseSensitiveDictionaryData) + { + writer.WriteStartObject(); + writer.WritePropertyName("k"); + writer.WriteValue(pair.Key); + writer.WritePropertyName("v"); + serializer.Serialize(writer, pair.Value); + writer.WriteEndObject(); + } + writer.WriteEndArray(); + } + writer.WriteEndObject(); + } + else + { + throw new NotSupportedException($"Unexpected type '{value.GetType().Name}'"); + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextDataType.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextDataType.cs new file mode 100644 index 00000000000..0053ff5e045 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/PipelineContextDataType.cs @@ -0,0 +1,19 @@ +using System; + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ + internal static class PipelineContextDataType + { + internal const Int32 String = 0; + + internal const Int32 Array = 1; + + internal const Int32 Dictionary = 2; + + internal const Int32 Boolean = 3; + + internal const Int32 Number = 4; + + internal const Int32 CaseSensitiveDictionary = 5; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextData/StringContextData.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/StringContextData.cs new file mode 100644 index 00000000000..5f12d2f782c --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/StringContextData.cs @@ -0,0 +1,76 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class StringContextData : PipelineContextData, IString + { + public StringContextData(String value) + : base(PipelineContextDataType.String) + { + m_value = value; + } + + public String Value + { + get + { + if (m_value == null) + { + m_value = String.Empty; + } + + return m_value; + } + } + + public override PipelineContextData Clone() + { + return new StringContextData(m_value); + } + + public override JToken ToJToken() + { + return (JToken)m_value; + } + + String IString.GetString() + { + return Value; + } + + public override String ToString() + { + return Value; + } + + public static implicit operator String(StringContextData data) + { + return data.Value; + } + + public static implicit operator StringContextData(String data) + { + return new StringContextData(data); + } + + [OnSerializing] + private void 
OnSerializing(StreamingContext context) + { + if (m_value?.Length == 0) + { + m_value = null; + } + } + + [DataMember(Name = "s", EmitDefaultValue = false)] + private String m_value; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextData/TemplateMemoryExtensions.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/TemplateMemoryExtensions.cs new file mode 100644 index 00000000000..f35297465d9 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/TemplateMemoryExtensions.cs @@ -0,0 +1,65 @@ +using System; +using System.Collections.Generic; +using GitHub.DistributedTask.ObjectTemplating; + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ + internal static class TemplateMemoryExtensions + { + internal static void AddBytes( + this TemplateMemory memory, + PipelineContextData value, + Boolean traverse) + { + var bytes = CalculateBytes(memory, value, traverse); + memory.AddBytes(bytes); + } + + internal static Int32 CalculateBytes( + this TemplateMemory memory, + PipelineContextData value, + Boolean traverse) + { + var enumerable = traverse ? value.Traverse() : new[] { value } as IEnumerable; + var result = 0; + foreach (var item in enumerable) + { + // This measurement doesn't have to be perfect + // https://codeblog.jonskeet.uk/2011/04/05/of-memory-and-strings/ + switch (item?.Type) + { + case PipelineContextDataType.String: + var str = item.AssertString("string").Value; + checked + { + result += TemplateMemory.MinObjectSize + TemplateMemory.StringBaseOverhead + ((str?.Length ?? 0) * sizeof(Char)); + } + break; + + case PipelineContextDataType.Array: + case PipelineContextDataType.Dictionary: + case PipelineContextDataType.Boolean: + case PipelineContextDataType.Number: + // Min object size is good enough. Allows for base + a few fields. 
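+                        // Editorial note (not part of the original change): an illustrative
+                        // summary of this estimate: each string costs MinObjectSize +
+                        // StringBaseOverhead + (length * sizeof(Char)); each dictionary,
+                        // array, boolean or number costs MinObjectSize; a null item costs
+                        // IntPtr.Size (see the null case below).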
+ checked + { + result += TemplateMemory.MinObjectSize; + } + break; + + case null: + checked + { + result += IntPtr.Size; + } + break; + + default: + throw new NotSupportedException($"Unexpected pipeline context data type '{item.Type}'"); + } + } + + return result; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextData/TemplateTokenExtensions.cs b/src/Sdk/DTPipelines/Pipelines/ContextData/TemplateTokenExtensions.cs new file mode 100644 index 00000000000..79cc7984b22 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextData/TemplateTokenExtensions.cs @@ -0,0 +1,78 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class TemplateTokenExtensions + { + [EditorBrowsable(EditorBrowsableState.Never)] + public static StringContextData ToContextData(this LiteralToken literal) + { + var token = literal as TemplateToken; + var contextData = token.ToContextData(); + return contextData.AssertString("converted literal token"); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static ArrayContextData ToContextData(this SequenceToken sequence) + { + var token = sequence as TemplateToken; + var contextData = token.ToContextData(); + return contextData.AssertArray("converted sequence token"); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static PipelineContextData ToContextData(this TemplateToken token) + { + switch (token.Type) + { + case TokenType.Mapping: + var mapping = token as MappingToken; + var dictionary = new DictionaryContextData(); + if (mapping.Count > 0) + { + foreach (var pair in mapping) + { + var keyLiteral = pair.Key.AssertString("dictionary context data key"); + var key = keyLiteral.Value; + var value = pair.Value.ToContextData(); + dictionary.Add(key, value); + } + } + return dictionary; + + case TokenType.Sequence: + var sequence = token as SequenceToken; + var array = new ArrayContextData(); + if (sequence.Count > 0) + { + foreach (var item in sequence) + { + array.Add(item.ToContextData()); + } + } + return array; + + case TokenType.Null: + return null; + + case TokenType.Boolean: + var boolean = token as BooleanToken; + return new BooleanContextData(boolean.Value); + + case TokenType.Number: + var number = token as NumberToken; + return new NumberContextData(number.Value); + + case TokenType.String: + var stringToken = token as StringToken; + return new StringContextData(stringToken.Value); + + default: + throw new NotSupportedException($"Unexpected {nameof(TemplateToken)} type '{token.Type}'"); + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContextScope.cs b/src/Sdk/DTPipelines/Pipelines/ContextScope.cs new file mode 100644 index 00000000000..14a23c48552 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContextScope.cs @@ -0,0 +1,53 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ContextScope + { + [DataMember(EmitDefaultValue = false)] + public String Name { get; set; } + + [IgnoreDataMember] + public String ContextName + { + get + { + var index = Name.LastIndexOf('.'); + if (index >= 0) + { + return Name.Substring(index + 1); + } + + return Name; + } + } + + 
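+        // Editorial note (not part of the original change): for a hypothetical scope
+        // named "stage1.job1", ContextName returns "job1" and ParentName (below)
+        // returns "stage1"; when Name contains no '.', ContextName is the full name
+        // and ParentName is String.Empty.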
[IgnoreDataMember] + public String ParentName + { + get + { + var index = Name.LastIndexOf('.'); + if (index >= 0) + { + return Name.Substring(0, index); + } + + return String.Empty; + } + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken Inputs { get; set; } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken Outputs { get; set; } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ContinuousIntegrationTrigger.cs b/src/Sdk/DTPipelines/Pipelines/ContinuousIntegrationTrigger.cs new file mode 100644 index 00000000000..1cc138e7750 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ContinuousIntegrationTrigger.cs @@ -0,0 +1,89 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class ContinuousIntegrationTrigger : PipelineTrigger + { + public ContinuousIntegrationTrigger() + : base(PipelineTriggerType.ContinuousIntegration) + { + Enabled = true; + } + + [DataMember(EmitDefaultValue = true)] + public Boolean Enabled + { + get; + set; + } + + /// + /// Indicates whether changes should be batched while another CI pipeline is running. + /// + /// + /// If this is true, then changes submitted while a CI pipeline is running will be batched and built in one new CI pipeline when the current pipeline finishes. + /// If this is false, then a new CI pipeline will be triggered for each change to the repository. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean BatchChanges + { + get; + set; + } + + /// + /// A list of filters that describe which branches will trigger pipelines. + /// + public IList BranchFilters + { + get + { + if (m_branchFilters == null) + { + m_branchFilters = new List(); + } + return m_branchFilters; + } + } + + /// + /// A list of filters that describe which paths will trigger pipelines. + /// + public IList PathFilters + { + get + { + if (m_pathFilters == null) + { + m_pathFilters = new List(); + } + return m_pathFilters; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_branchFilters?.Count == 0) + { + m_branchFilters = null; + } + + if (m_pathFilters?.Count == 0) + { + m_pathFilters = null; + } + } + + [DataMember(Name = "BranchFilters", EmitDefaultValue = false)] + private List m_branchFilters; + + [DataMember(Name = "PathFilters", EmitDefaultValue = false)] + private List m_pathFilters; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/CounterStore.cs b/src/Sdk/DTPipelines/Pipelines/CounterStore.cs new file mode 100644 index 00000000000..b77ad8a2568 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/CounterStore.cs @@ -0,0 +1,61 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a default implementation of a counter store. 
+ /// + [EditorBrowsable(EditorBrowsableState.Never)] + public class CounterStore : ICounterStore + { + public CounterStore( + IDictionary counters = null, + ICounterResolver resolver = null) + { + if (counters?.Count > 0) + { + m_counters.AddRange(counters); + } + + this.Resolver = resolver; + } + + public IReadOnlyDictionary Counters + { + get + { + return m_counters; + } + } + + private ICounterResolver Resolver + { + get; + } + + public Int32 Increment( + IPipelineContext context, + String prefix, + Int32 seed) + { + if (m_counters.TryGetValue(prefix, out Int32 existingValue)) + { + return existingValue; + } + + Int32 newValue = seed; + if (this.Resolver != null) + { + newValue = this.Resolver.Increment(context, prefix, seed); + m_counters[prefix] = newValue; + } + + return newValue; + } + + private readonly Dictionary m_counters = new Dictionary(StringComparer.OrdinalIgnoreCase); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/CreateJobResult.cs b/src/Sdk/DTPipelines/Pipelines/CreateJobResult.cs new file mode 100644 index 00000000000..2802869c043 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/CreateJobResult.cs @@ -0,0 +1,50 @@ +using System.ComponentModel; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public struct CreateJobResult + { + public CreateJobResult( + JobExecutionContext context, + Job job) + { + this.Job = job; + this.Context = context; + } + + public Job Job + { + get; + } + + public JobExecutionContext Context + { + get; + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public struct CreateTaskResult + { + public CreateTaskResult( + TaskStep task, + TaskDefinition definition) + { + this.Task = task; + this.Definition = definition; + } + + public TaskStep Task + { + get; + } + + public TaskDefinition Definition + { + get; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/DeploymentExecutionOptions.cs b/src/Sdk/DTPipelines/Pipelines/DeploymentExecutionOptions.cs new file mode 100644 index 00000000000..5ec777815d1 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/DeploymentExecutionOptions.cs @@ -0,0 +1,74 @@ +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines.Validation; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + internal enum DeploymentRollingOption + { + [EnumMember] + Absolute, + + [EnumMember] + Percentage + } + + [DataContract] + internal class DeploymentExecutionOptions + { + public DeploymentExecutionOptions() + { + } + + private DeploymentExecutionOptions(DeploymentExecutionOptions optionsToCopy) + { + this.RollingOption = optionsToCopy.RollingOption; + this.RollingValue = optionsToCopy.RollingValue; + } + + [DataMember] + public DeploymentRollingOption RollingOption + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public uint RollingValue + { + get; + set; + } + + public DeploymentExecutionOptions Clone() + { + return new DeploymentExecutionOptions(this); + } + + public void Validate( + IPipelineContext context, + ValidationResult result) + { + switch (RollingOption) + { + case DeploymentRollingOption.Absolute: + if (RollingValue == 0) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.InvalidAbsoluteRollingValue())); + } + break; + + case DeploymentRollingOption.Percentage: + if (RollingValue == 0 || RollingValue > 100) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.InvalidPercentageRollingValue())); + 
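+                        // Editorial note (not part of the original change): taken together,
+                        // these checks accept RollingValue >= 1 for Absolute and 1..100 for
+                        // Percentage; any other RollingOption falls through to the default
+                        // case below and is reported as invalid.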
} + break; + + default: + result.Errors.Add(new PipelineValidationError(PipelineStrings.InvalidRollingOption(RollingOption))); + break; + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/DeploymentGroupTarget.cs b/src/Sdk/DTPipelines/Pipelines/DeploymentGroupTarget.cs new file mode 100644 index 00000000000..9bd5cc6e04b --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/DeploymentGroupTarget.cs @@ -0,0 +1,158 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.Pipelines.Validation; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + internal class DeploymentGroupTarget : PhaseTarget + { + public DeploymentGroupTarget() + : base(PhaseTargetType.DeploymentGroup) + { + } + + private DeploymentGroupTarget(DeploymentGroupTarget targetToClone) + : base(targetToClone) + { + this.DeploymentGroup = targetToClone.DeploymentGroup?.Clone(); + this.Execution = targetToClone.Execution?.Clone(); + + if (targetToClone.m_tags != null && targetToClone.m_tags.Count > 0) + { + m_tags = new HashSet(targetToClone.m_tags, StringComparer.OrdinalIgnoreCase); + } + } + + [DataMember] + public DeploymentGroupReference DeploymentGroup + { + get; + set; + } + + public ISet Tags + { + get + { + if (m_tags == null) + { + m_tags = new HashSet(StringComparer.OrdinalIgnoreCase); + } + return m_tags; + } + } + + /// + /// Gets targets Ids filter on which deployment should be done. + /// + public List TargetIds + { + get + { + if (m_targetIds == null) + { + m_targetIds = new List(); + } + return m_targetIds; + } + } + + [DataMember(EmitDefaultValue = false)] + public DeploymentExecutionOptions Execution + { + get; + set; + } + + public override PhaseTarget Clone() + { + return new DeploymentGroupTarget(this); + } + + public override Boolean IsValid(TaskDefinition task) + { + return task.RunsOn.Contains(TaskRunsOnConstants.RunsOnDeploymentGroup, StringComparer.OrdinalIgnoreCase); + } + + internal override void Validate( + IPipelineContext context, + BuildOptions buildOptions, + ValidationResult result, + IList steps, + ISet taskDemands) + { + this.Execution?.Validate(context, result); + } + + internal override JobExecutionContext CreateJobContext( + PhaseExecutionContext context, + String jobName, + Int32 attempt, + Boolean continueOnError, + Int32 timeoutInMinutes, + Int32 cancelTimeoutInMinutes, + IJobFactory jobFactory) + { + context.Trace?.EnterProperty("CreateJobContext"); + var result = new ParallelExecutionOptions().CreateJobContext( + context, + jobName, + attempt, + null, + null, + continueOnError, + timeoutInMinutes, + cancelTimeoutInMinutes, + jobFactory); + context.Trace?.LeaveProperty("CreateJobContext"); + return result; + } + + internal override ExpandPhaseResult Expand( + PhaseExecutionContext context, + Boolean continueOnError, + Int32 timeoutInMinutes, + Int32 cancelTimeoutInMinutes, + IJobFactory jobFactory, + JobExpansionOptions options) + { + context.Trace?.EnterProperty("Expand"); + var result = new ParallelExecutionOptions().Expand( + context: context, + container: null, + sidecarContainers: null, + continueOnError: continueOnError, + timeoutInMinutes: timeoutInMinutes, + cancelTimeoutInMinutes: cancelTimeoutInMinutes, + jobFactory: jobFactory, + options: options); + context.Trace?.LeaveProperty("Expand"); + return result; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if 
(m_tags?.Count == 0) + { + m_tags = null; + } + + if (m_targetIds?.Count == 0) + { + m_targetIds = null; + } + } + + [DataMember(Name = "Tags", EmitDefaultValue = false)] + private ISet m_tags; + + [DataMember(Name = "TargetIds")] + private List m_targetIds; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Environment/EnvironmentReference.cs b/src/Sdk/DTPipelines/Pipelines/Environment/EnvironmentReference.cs new file mode 100644 index 00000000000..93e441d751e --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Environment/EnvironmentReference.cs @@ -0,0 +1,38 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class EnvironmentReference : ResourceReference + { + public EnvironmentReference() + { + } + + private EnvironmentReference(EnvironmentReference referenceToCopy) + : base(referenceToCopy) + { + this.Id = referenceToCopy.Id; + } + + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + set; + } + + public EnvironmentReference Clone() + { + return new EnvironmentReference(this); + } + + public override String ToString() + { + return base.ToString() ?? this.Id.ToString(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/EnvironmentDeploymentTarget.cs b/src/Sdk/DTPipelines/Pipelines/EnvironmentDeploymentTarget.cs new file mode 100644 index 00000000000..c34748631c9 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/EnvironmentDeploymentTarget.cs @@ -0,0 +1,21 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class EnvironmentDeploymentTarget + { + [DataMember] + public Int32 EnvironmentId { get; set; } + + [DataMember] + public String EnvironmentName { get; set; } + + [DataMember] + public EnvironmentResourceReference Resource { get; set; } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/EnvironmentStore.cs b/src/Sdk/DTPipelines/Pipelines/EnvironmentStore.cs new file mode 100644 index 00000000000..b0e96bf65d8 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/EnvironmentStore.cs @@ -0,0 +1,97 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class EnvironmentStore : IEnvironmentStore + { + public EnvironmentStore( + IList environments, + IEnvironmentResolver resolver = null) + { + m_resolver = resolver; + m_environmentsByName = new Dictionary(StringComparer.OrdinalIgnoreCase); + m_environmentsById = new Dictionary(); + Add(environments?.ToArray()); + } + + public void Add(params EnvironmentInstance[] environments) + { + if (environments is null) + { + return; + } + foreach (var e in environments) + { + if (e != null) + { + m_environmentsById[e.Id] = e; + + var name = e.Name; + if (!string.IsNullOrWhiteSpace(name)) + { + m_environmentsByName[name] = e; + } + } + } + } + + public EnvironmentInstance ResolveEnvironment(String name) + { + if (!m_environmentsByName.TryGetValue(name, out var environment) + && m_resolver != null) + { + environment = m_resolver?.Resolve(name); + Add(environment); + } + + return environment; + } + + public EnvironmentInstance ResolveEnvironment(Int32 id) + { + if (!m_environmentsById.TryGetValue(id, 
out var environment) + && m_resolver != null) + { + environment = m_resolver?.Resolve(id); + Add(environment); + } + + return environment; + } + + public EnvironmentInstance Get(EnvironmentReference reference) + { + if (reference is null) + { + return null; + } + + if (reference.Name?.IsLiteral == true) + { + return ResolveEnvironment(reference.Name.Literal); + } + + return ResolveEnvironment(reference.Id); + } + + public IList GetReferences() + { + return m_environmentsById.Values + .Select(x => new EnvironmentReference + { + Id = x.Id, + Name = x.Name + }) + .ToList(); + } + + private IEnvironmentResolver m_resolver; + private IDictionary m_environmentsByName; + private IDictionary m_environmentsById; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ExecutionOptions.cs b/src/Sdk/DTPipelines/Pipelines/ExecutionOptions.cs new file mode 100644 index 00000000000..6da517f9b56 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ExecutionOptions.cs @@ -0,0 +1,107 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechanism for controlling runtime behaviors. + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class ExecutionOptions + { + public ExecutionOptions() + { + } + + /// + /// Gets or sets a value indicating whether or not to remove secrets from job message. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean RestrictSecrets + { + get; + set; + } + + /// + /// Gets or sets a value indicating what scope the system jwt token will have. + /// + [DataMember(EmitDefaultValue = false)] + public String SystemTokenScope + { + get; + set; + } + + /// + /// Gets or sets value indicating any custom claims the system jwt token will have. + /// + public IDictionary SystemTokenCustomClaims + { + get + { + if (m_systemTokenCustomClaims == null) + { + m_systemTokenCustomClaims = new Dictionary(); + } + return m_systemTokenCustomClaims; + } + } + + /// + /// Gets or sets a value indicating what's the max number jobs we allow after expansion. + /// + [DataMember(EmitDefaultValue = false)] + public Int32? MaxJobExpansion + { + get; + set; + } + + /// + /// Gets or sets a value indicating the max parallelism slots available to overwrite MaxConcurrency of test job slicing + /// + [DataMember(EmitDefaultValue = false)] + public Int32? MaxParallelism + { + get; + set; + } + + /// + /// Gets or sets a value indicating if we should allow expressions to define secured resources. 
+ /// + [DataMember(EmitDefaultValue = false)] + public Boolean EnableResourceExpressions + { + get; + set; + } + + /// + /// Driven by FF: DistributedTask.LegalNodeNames + /// + [DataMember(EmitDefaultValue = false)] + public Boolean EnforceLegalNodeNames + { + get; + set; + } + + /// + /// Allows hyphens in yaml names + /// + [DataMember(EmitDefaultValue = false)] + public Boolean AllowHyphenNames + { + get; + set; + } + + [DataMember(Name = nameof(SystemTokenCustomClaims), EmitDefaultValue = false)] + private IDictionary m_systemTokenCustomClaims; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ExpandPhaseResult.cs b/src/Sdk/DTPipelines/Pipelines/ExpandPhaseResult.cs new file mode 100644 index 00000000000..11c5e736bb7 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ExpandPhaseResult.cs @@ -0,0 +1,66 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Pipelines.Runtime; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Represents the runtime values of a phase which has been expanded for execution. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public class ExpandPhaseResult + { + /// + /// Initializes a new ExpandPhaseResult innstance with a default maximum concurrency of 1. + /// + public ExpandPhaseResult() + { + this.MaxConcurrency = 1; + } + + /// + /// Gets or sets the execution behavior when an error is encountered. + /// + public Boolean ContinueOnError + { + get; + set; + } + + /// + /// Gets or sets the execution behavior when an error is encountered. + /// + public Boolean FailFast + { + get; + set; + } + + /// + /// Gets or sets the maximum concurrency for the jobs. + /// + public Int32 MaxConcurrency + { + get; + set; + } + + /// + /// Gets the list of jobs for this phase. + /// + public IList Jobs + { + get + { + if (m_jobs == null) + { + m_jobs = new List(); + } + return m_jobs; + } + } + + private List m_jobs; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ExpressionResult.cs b/src/Sdk/DTPipelines/Pipelines/ExpressionResult.cs new file mode 100644 index 00000000000..289881008fc --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ExpressionResult.cs @@ -0,0 +1,58 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Represents the result of an ExpressionValue<T> evaluation. + /// + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ExpressionResult + { + /// + /// Initializes a new ExpressionResult instance with the specified value. The value is implicilty treated as + /// non-secret. + /// + /// The resolved value + public ExpressionResult(T value) + : this(value, false) + { + } + + /// + /// Initializes a new ExpressionResult instance with the specified values. + /// + /// The resolved value + /// True if secrets were accessed while resolving the value; otherwise, false + public ExpressionResult( + T value, + Boolean containsSecrets) + { + this.ContainsSecrets = containsSecrets; + this.Value = value; + } + + /// + /// Gets or sets a value indicating whether or not secrets were accessed while resolving . + /// + [DataMember(EmitDefaultValue = false)] + public Boolean ContainsSecrets + { + get; + set; + } + + /// + /// Gets or sets the literal value result. 
+ /// + [DataMember] + public T Value + { + get; + set; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ExpressionValue.cs b/src/Sdk/DTPipelines/Pipelines/ExpressionValue.cs new file mode 100644 index 00000000000..e23c04bc801 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ExpressionValue.cs @@ -0,0 +1,311 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Reflection; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechanism for performing delayed evaluation of a value based on the environment context as runtime. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class ExpressionValue + { + public static Boolean IsExpression(String value) + { + return !String.IsNullOrEmpty(value) && + value.Length > 3 && + value.StartsWith("$[", StringComparison.Ordinal) && + value.EndsWith("]", StringComparison.Ordinal); + } + + /// + /// Attempts to parse the specified string as an expression value. + /// + /// The expected type of the expression result + /// The expression string + /// The value which was parsed, if any + /// True if the value was successfully parsed; otherwise, false + public static Boolean TryParse( + String expression, + out ExpressionValue value) + { + if (IsExpression(expression)) + { + value = new ExpressionValue(expression, isExpression: true); + } + else + { + value = null; + } + return value != null; + } + + /// + /// Creates an ExpressionValue from expression string. + /// Returns null if argument is not an expression + /// + public static ExpressionValue FromExpression(String expression) + { + return new ExpressionValue(expression, isExpression: true); + } + + /// + /// Creates an ExpressionValue from literal. + /// + public static ExpressionValue FromLiteral(T literal) + { + return new ExpressionValue(literal); + } + + /// + /// When T is String, we cannot distiguish between literals and expressions solely by type. + /// Use this function when parsing and you want to err on the side of expressions. + /// + public static ExpressionValue FromToken(String token) + { + if (ExpressionValue.IsExpression(token)) + { + return ExpressionValue.FromExpression(token); + } + + return ExpressionValue.FromLiteral(token); + } + + internal static String TrimExpression(String value) + { + var expression = value.Substring(2, value.Length - 3).Trim(); + if (String.IsNullOrEmpty(expression)) + { + throw new ArgumentException(PipelineStrings.ExpressionInvalid(value)); + } + return expression; + } + } + + /// + /// Provides a mechanism for performing delayed evaluation of a value based on the environment context at runtime. + /// + /// The type of value + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ExpressionValue : ExpressionValue, IEquatable> + { + /// + /// Initializes a new ExpressionValue instance with the specified literal value. + /// + /// The literal value which should be used + public ExpressionValue(T literalValue) + { + m_literalValue = literalValue; + } + + /// + /// Initializes a new ExpressionValue with the given expression. + /// Throws if expression is invalid. 
+ /// + /// The expression to be used + /// This parameter is unused other than to discriminate this constructor from the literal constructor + internal ExpressionValue( + String expression, + Boolean isExpression) + { + if (!IsExpression(expression)) + { + throw new ArgumentException(PipelineStrings.ExpressionInvalid(expression)); + } + m_expression = ExpressionValue.TrimExpression(expression); + } + + [JsonConstructor] + private ExpressionValue() + { + } + + internal T Literal + { + get + { + return m_literalValue; + } + } + + internal String Expression + { + get + { + return m_expression; + } + } + + /// + /// Gets a value indicating whether or not the expression is backed by a literal value. + /// + internal Boolean IsLiteral => String.IsNullOrEmpty(m_expression); + + /// + /// Retrieves the referenced value from the provided execution context. + /// + /// The execution context used for variable resolution + /// The value of the variable if found; otherwise, null + public ExpressionResult GetValue(IPipelineContext context = null) + { + if (this.IsLiteral) + { + return new ExpressionResult(m_literalValue, containsSecrets: false); + } + + if (context != null) + { + return context.Evaluate(m_expression); + } + + return null; + } + + /// + /// Converts the value to a string representation. + /// + /// A string representation of the current value + public override String ToString() + { + if (!String.IsNullOrEmpty(m_expression)) + { + return String.Concat("$[ ", m_expression, " ]"); + } + else + { + return m_literalValue?.ToString(); + } + } + + /// + /// Provides automatic conversion of a literal value into a pipeline value for convenience. + /// + /// The value which the pipeline value represents + public static implicit operator ExpressionValue(T value) + { + return new ExpressionValue(value); + } + + public Boolean Equals(ExpressionValue rhs) + { + if (rhs is null) + { + return false; + } + + if (ReferenceEquals(this, rhs)) + { + return true; + } + + if (IsLiteral) + { + return EqualityComparer.Default.Equals(this.Literal, rhs.Literal); + } + else + { + return this.Expression == rhs.Expression; + } + } + + public override Boolean Equals(object obj) + { + return Equals(obj as ExpressionValue); + } + + public static Boolean operator ==(ExpressionValue lhs, ExpressionValue rhs) + { + if (lhs is null) + { + return rhs is null; + } + + return lhs.Equals(rhs); + } + public static Boolean operator !=(ExpressionValue lhs, ExpressionValue rhs) + { + return !(lhs == rhs); + } + + public override Int32 GetHashCode() + { + if (IsLiteral) + { + if (Literal != null) + { + return Literal.GetHashCode(); + } + } + else if (Expression != null) + { + return Expression.GetHashCode(); + } + + return 0; // unspecified expression values are all the same. 
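+            // Editorial usage sketch (not part of the original change), with hypothetical
+            // values: ExpressionValue.FromToken("$[ variables.config ]") yields an
+            // expression-backed value (IsLiteral == false, Expression == "variables.config"),
+            // while ExpressionValue.FromToken("Release") yields a literal. ToString()
+            // re-wraps an expression as "$[ variables.config ]".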
+ } + + [DataMember(Name = "LiteralValue", EmitDefaultValue = false)] + private readonly T m_literalValue; + + [DataMember(Name = "VariableValue", EmitDefaultValue = false)] + private readonly String m_expression; + } + + internal class ExpressionValueJsonConverter : VssSecureJsonConverter + { + public override Boolean CanConvert(Type objectType) + { + return objectType.GetTypeInfo().Equals(typeof(String).GetTypeInfo()) || typeof(T).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType == JsonToken.String) + { + // string types are either expressions of any type T, or literals of type String + var s = (String)(Object)reader.Value; + if (ExpressionValue.IsExpression(s)) + { + return ExpressionValue.FromExpression(s); + } + else + { + return new ExpressionValue(s); + } + } + else + { + var parsedValue = serializer.Deserialize(reader); + return new ExpressionValue(parsedValue); + } + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + base.WriteJson(writer, value, serializer); + if (value is ExpressionValue expressionValue) + { + if (!String.IsNullOrEmpty(expressionValue.Expression)) + { + serializer.Serialize(writer, $"$[ {expressionValue.Expression} ]"); + } + else + { + serializer.Serialize(writer, expressionValue.Literal); + } + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/CounterNode.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/CounterNode.cs new file mode 100644 index 00000000000..ad18857eae7 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/CounterNode.cs @@ -0,0 +1,28 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class CounterNode : FunctionNode + { + protected override Object EvaluateCore(EvaluationContext evaluationContext) + { + int seed = 0; + var prefix = String.Empty; + if (Parameters.Count > 0) + { + prefix = Parameters[0].EvaluateString(evaluationContext); + } + + if (Parameters.Count > 1) + { + seed = Convert.ToInt32(Parameters[1].EvaluateNumber(evaluationContext)); + } + + var context = evaluationContext.State as IPipelineContext; + return context.CounterStore?.Increment(context, prefix, seed) ?? seed; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/ExpressionConstants.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/ExpressionConstants.cs new file mode 100644 index 00000000000..ad387809804 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/ExpressionConstants.cs @@ -0,0 +1,30 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class ExpressionConstants + { + /// + /// Gets the name of the variables node. + /// + public static readonly String Variables = "variables"; + + /// + /// Gets the pipeline context available in pipeline expressions. + /// + public static readonly INamedValueInfo PipelineNamedValue = new NamedValueInfo("pipeline"); + + /// + /// Gets the variable context available in pipeline expressions. 
+ /// + public static readonly INamedValueInfo VariablesNamedValue = new NamedValueInfo("variables"); + + /// + /// Gets the counter function available in pipeline expressions. + /// + public static readonly IFunctionInfo CounterFunction = new FunctionInfo("counter", 0, 2); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/InputValidationConstants.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/InputValidationConstants.cs new file mode 100644 index 00000000000..657eb4aaadb --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/InputValidationConstants.cs @@ -0,0 +1,32 @@ +using System; +using GitHub.DistributedTask.Expressions; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + internal static class InputValidationConstants + { + public static readonly String IsEmail = "isEmail"; + public static readonly String IsInRange = "isInRange"; + public static readonly String IsIPv4Address = "isIPv4Address"; + public static readonly String IsSha1 = "isSha1"; + public static readonly String IsUrl = "isUrl"; + public static readonly String IsMatch = "isMatch"; + public static readonly String Length = "length"; + + public static readonly IFunctionInfo[] Functions = new IFunctionInfo[] + { + new FunctionInfo(InputValidationConstants.IsEmail, IsEmailNode.minParameters, IsEmailNode.maxParameters), + new FunctionInfo(InputValidationConstants.IsInRange, IsInRangeNode.minParameters, IsInRangeNode.maxParameters), + new FunctionInfo(InputValidationConstants.IsIPv4Address, IsIPv4AddressNode.minParameters, IsIPv4AddressNode.maxParameters), + new FunctionInfo(InputValidationConstants.IsMatch, IsMatchNode.minParameters, IsMatchNode.maxParameters), + new FunctionInfo(InputValidationConstants.IsSha1, IsSHA1Node.minParameters, IsSHA1Node.maxParameters), + new FunctionInfo(InputValidationConstants.IsUrl, IsUrlNode.minParameters, IsUrlNode.maxParameters), + new FunctionInfo(InputValidationConstants.Length, LengthNode.minParameters, LengthNode.maxParameters), + }; + + public static readonly INamedValueInfo[] NamedValues = new INamedValueInfo[] + { + new NamedValueInfo("value"), + }; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/InputValueNode.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/InputValueNode.cs new file mode 100644 index 00000000000..9c7fd593032 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/InputValueNode.cs @@ -0,0 +1,15 @@ +using System; +using GitHub.DistributedTask.Expressions; +using GitHub.DistributedTask.Pipelines.Validation; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + internal class InputValueNode : NamedValueNode + { + protected sealed override Object EvaluateCore(EvaluationContext evaluationContext) + { + var validationContext = evaluationContext.State as InputValidationContext; + return validationContext.Value; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/IsEmailNode.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/IsEmailNode.cs new file mode 100644 index 00000000000..39ac71010ab --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/IsEmailNode.cs @@ -0,0 +1,22 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class IsEmailNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + public static Int32 minParameters = 1; + public static Int32 maxParameters = 1; + + protected sealed override 
Object EvaluateCore(EvaluationContext context) + { + // isEmail(value: string) + String value = Parameters[0].EvaluateString(context) ?? String.Empty; + return RegexUtility.IsMatch(value, WellKnownRegularExpressions.Email); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/IsIPv4AddressNode.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/IsIPv4AddressNode.cs new file mode 100644 index 00000000000..4758332138d --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/IsIPv4AddressNode.cs @@ -0,0 +1,22 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class IsIPv4AddressNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + public static Int32 minParameters = 1; + public static Int32 maxParameters = 1; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + // isIpV4Address(value: string) + String value = Parameters[0].EvaluateString(context) ?? String.Empty; + return RegexUtility.IsMatch(value, WellKnownRegularExpressions.IPv4Address); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/IsInRangeNode.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/IsInRangeNode.cs new file mode 100644 index 00000000000..e1859894985 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/IsInRangeNode.cs @@ -0,0 +1,24 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class IsInRangeNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + public static Int32 minParameters = 3; + public static Int32 maxParameters = 3; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + // isInRange(value: string, min: string, max: string) + decimal value = Parameters[0].EvaluateNumber(context); + decimal min = Parameters[1].EvaluateNumber(context); + decimal max = Parameters[2].EvaluateNumber(context); + return value >= min && value <= max; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/IsMatchNode.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/IsMatchNode.cs new file mode 100644 index 00000000000..7a45f353c12 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/IsMatchNode.cs @@ -0,0 +1,30 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class IsMatchNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + public static Int32 minParameters = 2; + public static Int32 maxParameters = 3; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + // isMatch(value: string, regEx: string, options?: string) + String value = Parameters[0].EvaluateString(context) ?? String.Empty; + String regEx = Parameters[1].EvaluateString(context) ?? String.Empty; + String regExOptionsString = String.Empty; + + if (Parameters.Count == 3) + { + regExOptionsString = Parameters[2].EvaluateString(context) ?? 
String.Empty; + } + + return RegexUtility.IsMatch(value, regEx, regExOptionsString); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/IsSHA1Node.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/IsSHA1Node.cs new file mode 100644 index 00000000000..ecc4d8a052c --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/IsSHA1Node.cs @@ -0,0 +1,22 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class IsSHA1Node : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + public static Int32 minParameters = 1; + public static Int32 maxParameters = 1; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + // isSha1(value: string) + String value = Parameters[0].EvaluateString(context) ?? String.Empty; + return RegexUtility.IsMatch(value, WellKnownRegularExpressions.SHA1); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/IsUrlNode.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/IsUrlNode.cs new file mode 100644 index 00000000000..9c94e77ab14 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/IsUrlNode.cs @@ -0,0 +1,22 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class IsUrlNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + public static Int32 minParameters = 1; + public static Int32 maxParameters = 1; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + // isUrl(value: string) + String value = Parameters[0].EvaluateString(context) ?? 
String.Empty; + return RegexUtility.IsMatch(value, WellKnownRegularExpressions.Url); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/LengthNode.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/LengthNode.cs new file mode 100644 index 00000000000..0675c001f72 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/LengthNode.cs @@ -0,0 +1,63 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class LengthNode : FunctionNode + { + protected sealed override Boolean TraceFullyRealized => false; + + public static Int32 minParameters = 1; + public static Int32 maxParameters = 1; + + protected sealed override Object EvaluateCore(EvaluationContext context) + { + // Length(value: object) + var evaluationResult = Parameters[0].Evaluate(context); + bool kindNotSupported = false; + Int32 length = -1; + + switch (evaluationResult.Kind) + { + case ValueKind.Array: + length = ((JArray)evaluationResult.Value).Count; + break; + case ValueKind.String: + length = ((String)evaluationResult.Value).Length; + break; + case ValueKind.Object: + if (evaluationResult.Value is IReadOnlyDictionary) + { + length = ((IReadOnlyDictionary)evaluationResult.Value).Count; + } + else if (evaluationResult.Value is ICollection) + { + length = ((ICollection)evaluationResult.Value).Count; + } + else + { + kindNotSupported = true; + } + break; + case ValueKind.Boolean: + case ValueKind.Null: + case ValueKind.Number: + case ValueKind.Version: + kindNotSupported = true; + break; + } + + if (kindNotSupported) + { + throw new NotSupportedException(PipelineStrings.InvalidTypeForLengthFunction(evaluationResult.Kind)); + } + + return new Decimal(length); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/PipelineContextNode.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/PipelineContextNode.cs new file mode 100644 index 00000000000..6e35657bfab --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/PipelineContextNode.cs @@ -0,0 +1,32 @@ +using System; +using System.ComponentModel; +using System.Collections.Generic; +using GitHub.DistributedTask.Expressions; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + internal sealed class PipelineContextNode : NamedValueNode + { + protected override Object EvaluateCore(EvaluationContext context) + { + var state = context.State as IPipelineContext; + var result = new Dictionary(StringComparer.OrdinalIgnoreCase); + + // startTime + if (state.Variables.TryGetValue(WellKnownDistributedTaskVariables.PipelineStartTime, out VariableValue startTimeVariable) && + !String.IsNullOrEmpty(startTimeVariable.Value)) + { + // Leverage the expression SDK to convert to datetime + var startTimeResult = EvaluationResult.CreateIntermediateResult(context, startTimeVariable.Value, out _); + if (startTimeResult.TryConvertToDateTime(context, out DateTimeOffset startTime)) + { + result["startTime"] = startTime; + } + } + + return result; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/RegexUtility.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/RegexUtility.cs new file mode 100644 index 00000000000..1d415797273 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/RegexUtility.cs @@ -0,0 +1,153 @@ +using 
System; +using System.Text.RegularExpressions; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + public static class RegexUtility + { + /// + /// Gets default timeout for regex + /// + /// + public static TimeSpan GetRegexTimeOut() + { + return s_regexTimeout; + } + + /// + /// Performs regex single match with ECMAScript-complaint behavior + /// Will throw RegularExpressionFailureException if regular expression parsing error occurs or if regular expression takes more than allotted time to execute + /// Supported regex options - 'i' (ignorecase), 'm' (multiline) + /// + /// + /// + /// + /// + public static bool IsMatch( + String value, + String regexPattern, + String regexOptionsString) + { + return IsSafeMatch(value, regexPattern, ConvertToRegexOptions(regexOptionsString)); + } + + /// + /// Performs regex single match with ECMAScript-complaint behavior + /// Will throw RegularExpressionFailureException if regular expression parsing error occurs or if regular expression takes more than allotted time to execute + /// If the key is not known, returns true + /// + /// + /// One of WellKnownRegularExpressionKeys + /// + public static bool IsMatch( + String value, + String wellKnownRegexKey) + { + Lazy lazyRegex = WellKnownRegularExpressions.GetRegex(wellKnownRegexKey); + if (lazyRegex == null) + { + return true; + } + + Regex regex = lazyRegex.Value; + return IsSafeMatch(value, x => regex.Match(value)); + } + + /// + /// Converts regex in string to RegExOptions, valid flags are "i", "m" + /// Throws RegularExpressionInvalidOptionsException if there are any invalid options + /// + /// + /// + public static RegexOptions ConvertToRegexOptions(String regexOptions) + { + RegexOptions result; + if (TryConvertToRegexOptions(regexOptions, out result)) + { + return result; + } + + throw new RegularExpressionInvalidOptionsException(PipelineStrings.InvalidRegexOptions(regexOptions, String.Join(",", WellKnownRegexOptions.All))); + } + + private static bool TryConvertToRegexOptions( + String regexOptions, + out RegexOptions result) + { + // Eg: "IgnoreCase, MultiLine" or "IgnoreCase" + result = RegexOptions.ECMAScript | RegexOptions.CultureInvariant; + + if (String.IsNullOrEmpty(regexOptions)) + { + return false; + } + + String[] regexOptionValues = regexOptions.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries); + + for (int i = 0; i < regexOptionValues.Length; i++) + { + String option = regexOptionValues[i]; + + if (String.Equals(option, WellKnownRegexOptions.IgnoreCase, StringComparison.OrdinalIgnoreCase)) + { + result = result | RegexOptions.IgnoreCase; + } + else if (String.Equals(option, WellKnownRegexOptions.Multiline, StringComparison.OrdinalIgnoreCase)) + { + result = result | RegexOptions.Multiline; + } + else + { + return false; + } + } + + return true; + } + + private static Boolean IsSafeMatch( + String value, + Func getSafeMatch) + { + Boolean result = true; + try + { + var match = getSafeMatch(value); + result = match.Success; + } + catch (Exception ex) when (ex is RegexMatchTimeoutException || ex is ArgumentException) + { + throw new RegularExpressionValidationFailureException(PipelineStrings.RegexFailed(value, ex.Message), ex); + } + + return result; + } + + private static Boolean IsSafeMatch( + String value, + String regex, + RegexOptions regexOptions) + { + return IsSafeMatch(value, x => GetSafeMatch(x, regex, regexOptions)); + } + + private static Match GetSafeMatch( + String value, + String regex, + RegexOptions 
regexOptions) + { + return Regex.Match(value, regex, regexOptions, s_regexTimeout); + } + + // 2 seconds should be enough mostly, per DataAnnotations class - http://index/?query=REGEX_DEFAULT_MATCH_TIMEOUT + private static TimeSpan s_regexTimeout = TimeSpan.FromSeconds(2); + + private static class WellKnownRegexOptions + { + public static String IgnoreCase = nameof(IgnoreCase); + public static String Multiline = nameof(Multiline); + public static String[] All = new String[] { IgnoreCase, Multiline }; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/VariablesContextNode.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/VariablesContextNode.cs new file mode 100644 index 00000000000..d548f5ab06f --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/VariablesContextNode.cs @@ -0,0 +1,16 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class VariablesContextNode : NamedValueNode + { + protected override Object EvaluateCore(EvaluationContext context) + { + var executionContext = context.State as IPipelineContext; + return executionContext.Variables; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Expressions/WellKnownRegularExpressions.cs b/src/Sdk/DTPipelines/Pipelines/Expressions/WellKnownRegularExpressions.cs new file mode 100644 index 00000000000..72899fb4253 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Expressions/WellKnownRegularExpressions.cs @@ -0,0 +1,63 @@ +using System; +using System.Text.RegularExpressions; + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ + public static class WellKnownRegularExpressions + { + public const String Email = nameof(Email); + public const String IPv4Address = nameof(IPv4Address); + public const String SHA1 = nameof(SHA1); + public const String Url = nameof(Url); + + /// + /// Returns null if it's not a well-known type + /// + /// + /// + public static Lazy GetRegex(String regexType) + { + switch (regexType) + { + case Email: + return s_validEmail; + case IPv4Address: + return s_validIPv4Address; + case SHA1: + return s_validSha1; + case Url: + return s_validUrl; + default: + return null; + } + } + + // regex from http://index/?leftProject=System.ComponentModel.DataAnnotations&leftSymbol=cmnlm5e7vdio&file=DataAnnotations%5CEmailAddressAttribute.cs&rightSymbol=jfeiathypuap + private static readonly Lazy s_validEmail = new Lazy(() => new Regex( + @"^((([a-z]|\d|[!#\$%&'\*\+\-\/=\?\^_`{\|}~]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])+(\.([a-z]|\d|[!#\$%&'\*\+\-\/=\?\^_`{\|}~]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])+)*)|((\x22)((((\x20|\x09)*(\x0d\x0a))?(\x20|\x09)+)?(([\x01-\x08\x0b\x0c\x0e-\x1f\x7f]|\x21|[\x23-\x5b]|[\x5d-\x7e]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(\\([\x01-\x09\x0b\x0c\x0d-\x7f]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]))))*(((\x20|\x09)*(\x0d\x0a))?(\x20|\x09)+)?(\x22)))@((([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.)+(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.?$", + RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled, RegexUtility.GetRegexTimeOut() 
+ ) + ); + + // simple check - {1 to 3 digits}.{1 to 3 digits}.{1 to 3 digits}.{1 to 3 digits} + private static readonly Lazy s_validIPv4Address = new Lazy(() => new Regex( + @"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", + RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled, RegexUtility.GetRegexTimeOut() + ) + ); + + // 40 hex characters + private static readonly Lazy s_validSha1 = new Lazy(() => new Regex( + @"\b[0-9a-f]{40}\b", + RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled, RegexUtility.GetRegexTimeOut() + ) + ); + + // regex from http://index/?leftProject=System.ComponentModel.DataAnnotations&leftSymbol=gk29yrysvq6y&file=DataAnnotations%5CUrlAttribute.cs&line=11 + private static readonly Lazy s_validUrl = new Lazy(() => new Regex( + @"^(https?|ftp):\/\/(((([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:)*@)?(((\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5]))|((([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.)+(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.?)(:\d*)?)(\/((([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)+(\/(([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)*)*)?)?(\?((([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)|[\uE000-\uF8FF]|\/|\?)*)?(\#((([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(%[\da-f]{2})|[!\$&'\(\)\*\+,;=]|:|@)|\/|\?)*)?$", + RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled, RegexUtility.GetRegexTimeOut() + ) + ); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/GraphCondition.cs b/src/Sdk/DTPipelines/Pipelines/GraphCondition.cs new file mode 100644 index 00000000000..06fd7409ccd --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/GraphCondition.cs @@ -0,0 +1,247 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Text; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.Pipelines.Expressions; +using GitHub.DistributedTask.Pipelines.ObjectTemplating; +using GitHub.DistributedTask.Pipelines.Runtime; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class GraphCondition where TInstance : IGraphNodeInstance + { + private protected GraphCondition(String condition) + { + m_condition = !String.IsNullOrEmpty(condition) ? 
condition : Default; + m_parser = new ExpressionParser(); + m_parsedCondition = m_parser.CreateTree(m_condition, new ConditionTraceWriter(), s_namedValueInfo, FunctionInfo); + } + + /// + /// Gets the default condition if none is specified + /// + public static String Default + { + get + { + return $"{PipelineTemplateConstants.Success}()"; + } + } + + /// + /// Gets a value indicating whether the event payload is used within the condition + /// + public Boolean RequiresEventPayload + { + get + { + CheckRequiredProperties(); + return m_requiresEventPayload.Value; + } + } + + /// + /// Gets a value indicating whether dependency outputs are used within the condition + /// + public Boolean RequiresOutputs + { + get + { + CheckRequiredProperties(); + return m_requiresOutputs.Value; + } + } + + /// + /// Gets a value indicating whether variables are used within the condition + /// + public Boolean RequiresVariables + { + get + { + return false; + } + } + + private void CheckRequiredProperties() + { + var matches = m_parsedCondition.CheckReferencesContext(PipelineTemplateConstants.EventPattern, PipelineTemplateConstants.OutputsPattern); + m_requiresEventPayload = matches[0]; + m_requiresOutputs = matches[1]; + } + + private static IEnumerable GetNeeds( + IReadOnlyList parameters, + EvaluationContext context, + GraphExecutionContext expressionContext) + { + if (expressionContext.Data.TryGetValue(PipelineTemplateConstants.Needs, out var needsData) && + needsData is DictionaryContextData needs) + { + if (parameters.Count == 0) + { + foreach (var pair in needs) + { + yield return pair.Value as DictionaryContextData; + } + } + else + { + foreach (var parameter in parameters) + { + var parameterResult = parameter.Evaluate(context); + var dependencyName = default(String); + if (parameterResult.IsPrimitive) + { + dependencyName = parameterResult.ConvertToString(); + } + + if (!String.IsNullOrEmpty(dependencyName) && + needs.TryGetValue(dependencyName, out var need)) + { + yield return need as DictionaryContextData; + } + else + { + yield return default; + } + } + } + } + } + + private readonly String m_condition; + private readonly ExpressionParser m_parser; + private Boolean? m_requiresEventPayload; + private Boolean? 
m_requiresOutputs; + protected readonly IExpressionNode m_parsedCondition; + + private static readonly INamedValueInfo[] s_namedValueInfo = new INamedValueInfo[] + { + new NamedValueInfo>(PipelineTemplateConstants.GitHub), + new NamedValueInfo>(PipelineTemplateConstants.Needs), + }; + + public static readonly IFunctionInfo[] FunctionInfo = new IFunctionInfo[] + { + new FunctionInfo(PipelineTemplateConstants.Always, 0, 0), + new FunctionInfo(PipelineTemplateConstants.Failure, 0, Int32.MaxValue), + new FunctionInfo(PipelineTemplateConstants.Cancelled, 0, 0), + new FunctionInfo(PipelineTemplateConstants.Success, 0, Int32.MaxValue), + }; + + protected sealed class ConditionTraceWriter : ITraceWriter + { + public String Trace + { + get + { + return m_info.ToString(); + } + } + + public void Info(String message) + { + m_info.AppendLine(message); + } + + public void Verbose(String message) + { + // Not interested + } + + private StringBuilder m_info = new StringBuilder(); + } + + private sealed class AlwaysFunction : Function + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + return true; + } + } + + private sealed class CancelledFunction : Function + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var conditionContext = context.State as GraphExecutionContext; + return conditionContext.State == PipelineState.Canceling; + } + } + + private sealed class FailureFunction : Function + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var conditionContext = context.State as GraphExecutionContext; + if (conditionContext.State != PipelineState.InProgress) + { + return false; + } + + Boolean anyFailed = false; + foreach (var need in GetNeeds(Parameters, context, conditionContext)) + { + if (need == null || + !need.TryGetValue(PipelineTemplateConstants.Result, out var resultData) || + !(resultData is StringContextData resultString)) + { + return false; + } + + if (String.Equals(resultString, PipelineTemplateConstants.Failure, StringComparison.OrdinalIgnoreCase)) + { + anyFailed = true; + break; + } + } + + return anyFailed; + } + } + + private sealed class SuccessFunction : Function + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var conditionContext = context.State as GraphExecutionContext; + if (conditionContext.State != PipelineState.InProgress) + { + return false; + } + + Boolean allSucceeded = true; + foreach (var need in GetNeeds(Parameters, context, conditionContext)) + { + if (!allSucceeded || + need == null || + !need.TryGetValue(PipelineTemplateConstants.Result, out var resultData) || + !(resultData is StringContextData resultString) || + !String.Equals(resultString, PipelineTemplateConstants.Success, StringComparison.OrdinalIgnoreCase)) + { + return false; + } + } + + return true; + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/GroupStep.cs b/src/Sdk/DTPipelines/Pipelines/GroupStep.cs new file mode 100644 index 00000000000..c66bc1a3265 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/GroupStep.cs @@ -0,0 +1,86 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + 
[EditorBrowsable(EditorBrowsableState.Never)] + public class GroupStep : JobStep + { + [JsonConstructor] + public GroupStep() + { + } + + private GroupStep(GroupStep groupStepToClone) + : base(groupStepToClone) + { + if (groupStepToClone.m_steps?.Count > 0) + { + foreach (var step in groupStepToClone.m_steps) + { + this.Steps.Add(step.Clone() as TaskStep); + } + } + + if (groupStepToClone.m_outputs?.Count > 0) + { + this.m_outputs = new Dictionary(groupStepToClone.m_outputs, StringComparer.OrdinalIgnoreCase); + } + } + + public override StepType Type => StepType.Group; + + public IList Steps + { + get + { + if (m_steps == null) + { + m_steps = new List(); + } + return m_steps; + } + } + + public IDictionary Outputs + { + get + { + if (m_outputs == null) + { + m_outputs = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_outputs; + } + } + + public override Step Clone() + { + return new GroupStep(this); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_steps?.Count == 0) + { + m_steps = null; + } + + if (m_outputs?.Count == 0) + { + m_outputs = null; + } + } + + [DataMember(Name = "Steps", EmitDefaultValue = false)] + private IList m_steps; + + [DataMember(Name = "Outputs", EmitDefaultValue = false)] + private IDictionary m_outputs; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IAgentPoolResolver.cs b/src/Sdk/DTPipelines/Pipelines/IAgentPoolResolver.cs new file mode 100644 index 00000000000..018605801cb --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IAgentPoolResolver.cs @@ -0,0 +1,37 @@ +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechanism of resolving an AgentPoolReference to a TaskAgentPool. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IAgentPoolResolver + { + /// + /// Attempts to resolve the agent pool references to TaskAgentPool instances. + /// + /// The agent pools which should be resolved + /// A list containing the resolved agent pools + IList Resolve(ICollection references); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static class IAgentPoolResolverExtensions + { + /// + /// Attempts to resolve the agent pool reference to a TaskAgentPool. + /// + /// The agent pool which should be resolved + /// The agent pool if resolved; otherwise, null + public static TaskAgentPool Resolve( + this IAgentPoolResolver resolver, + AgentPoolReference reference) + { + return resolver.Resolve(new[] { reference }).FirstOrDefault(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IAgentPoolStore.cs b/src/Sdk/DTPipelines/Pipelines/IAgentPoolStore.cs new file mode 100644 index 00000000000..f6e6626f4b0 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IAgentPoolStore.cs @@ -0,0 +1,27 @@ +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IAgentPoolStore + { + /// + /// Adds a reference which should be considered authorized. Future + /// calls to retrieve this resource will be treated as pre-authorized regardless + /// of authorization context used. 
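Looping back to the condition functions declared in GraphCondition.cs above: success() passes only when every referenced dependency reports the result "success", while failure() trips as soon as any dependency reports "failure"; both also require the pipeline to still be in progress and bail out when a dependency has no recorded result, which the simplified sketch below omits. This is an illustrative restatement over a plain result map, not the SDK's evaluation path:

using System;
using System.Collections.Generic;
using System.Linq;

static class ConditionSketch
{
    // success(): every listed dependency must have the result "success".
    public static bool Success(IReadOnlyDictionary<string, string> needs) =>
        needs.Values.All(r => string.Equals(r, "success", StringComparison.OrdinalIgnoreCase));

    // failure(): at least one listed dependency has the result "failure".
    public static bool Failure(IReadOnlyDictionary<string, string> needs) =>
        needs.Values.Any(r => string.Equals(r, "failure", StringComparison.OrdinalIgnoreCase));
}

// needs = { build: "success", test: "failure" }  =>  Success(needs) == false, Failure(needs) == true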
+ /// + /// The pools which should be authorized + void Authorize(IList pools); + + IList GetAuthorizedReferences(); + + TaskAgentPool Get(AgentPoolReference reference); + + /// + /// Gets the IAgentPoolResolver used by this store, if any. + /// + IAgentPoolResolver Resolver { get; } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IAgentQueueResolver.cs b/src/Sdk/DTPipelines/Pipelines/IAgentQueueResolver.cs new file mode 100644 index 00000000000..14771855fe5 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IAgentQueueResolver.cs @@ -0,0 +1,37 @@ +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechanism of resolving an AgentQueueReference to a TaskAgentQueue. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IAgentQueueResolver + { + /// + /// Attempts to resolve the agent queue references to TaskAgentQueue instances. + /// + /// The agent queues which should be resolved + /// A list containing the resolved agent queues + IList Resolve(ICollection references); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static class IAgentQueueResolverExtensions + { + /// + /// Attempts to resolve the agent queue reference to a TaskAgentQueue. + /// + /// The agent queue which should be resolved + /// The agent queue if resolved; otherwise, null + public static TaskAgentQueue Resolve( + this IAgentQueueResolver resolver, + AgentQueueReference reference) + { + return resolver.Resolve(new[] { reference }).FirstOrDefault(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IAgentQueueStore.cs b/src/Sdk/DTPipelines/Pipelines/IAgentQueueStore.cs new file mode 100644 index 00000000000..16d3f1e21fc --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IAgentQueueStore.cs @@ -0,0 +1,27 @@ +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IAgentQueueStore + { + /// + /// Adds a reference which should be considered authorized. Future + /// calls to retrieve this resource will be treated as pre-authorized regardless + /// of authorization context used. + /// + /// The queue which should be authorized + void Authorize(IList queues); + + IList GetAuthorizedReferences(); + + TaskAgentQueue Get(AgentQueueReference reference); + + /// + /// Gets the IAgentQueueResolver used by this store, if any. 
+ /// + IAgentQueueResolver Resolver { get; } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ICounterResolver.cs b/src/Sdk/DTPipelines/Pipelines/ICounterResolver.cs new file mode 100644 index 00000000000..bd3e349efa0 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ICounterResolver.cs @@ -0,0 +1,11 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface ICounterResolver + { + Int32 Increment(IPipelineContext context, String prefix, Int32 seed); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ICounterStore.cs b/src/Sdk/DTPipelines/Pipelines/ICounterStore.cs new file mode 100644 index 00000000000..cfaba4c3e87 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ICounterStore.cs @@ -0,0 +1,24 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface ICounterStore + { + /// + /// Gets the counters which are allocated for this store. + /// + IReadOnlyDictionary Counters { get; } + + /// + /// Increments the counter with the given prefix. If no such counter exists, a new one will be created with + /// as the initial value. + /// + /// The counter prefix + /// The initial value for the counter if the counter does not exist + /// The incremented value + Int32 Increment(IPipelineContext context, String prefix, Int32 seed); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IEnvironmentResolver.cs b/src/Sdk/DTPipelines/Pipelines/IEnvironmentResolver.cs new file mode 100644 index 00000000000..b0f58b1c55b --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IEnvironmentResolver.cs @@ -0,0 +1,14 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IEnvironmentResolver + { + EnvironmentInstance Resolve(String environmentName); + + EnvironmentInstance Resolve(Int32 environmentId); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IEnvironmentStore.cs b/src/Sdk/DTPipelines/Pipelines/IEnvironmentStore.cs new file mode 100644 index 00000000000..942a08f1bd1 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IEnvironmentStore.cs @@ -0,0 +1,22 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a contract for resolving environment from a given store. 
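The Increment contract on ICounterStore above seeds a new counter at the supplied value and bumps it on later calls, which is what the counter() expression function ultimately relies on. A minimal in-memory sketch of that behavior (illustrative only; the real store is service-backed and also receives the IPipelineContext, which is dropped here):

using System;
using System.Collections.Generic;

class InMemoryCounterSketch
{
    private readonly Dictionary<string, int> _counters =
        new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);

    // First call for a prefix returns the seed; each later call returns the previous value + 1.
    public int Increment(string prefix, int seed)
    {
        _counters[prefix] = _counters.TryGetValue(prefix, out var current)
            ? current + 1
            : seed;
        return _counters[prefix];
    }
}

// counter('build', 100) backed by such a store would yield 100, 101, 102, ... on successive calls.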
+    ///
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public interface IEnvironmentStore
+    {
+        EnvironmentInstance ResolveEnvironment(String environmentName);
+
+        EnvironmentInstance ResolveEnvironment(Int32 environmentId);
+
+        EnvironmentInstance Get(EnvironmentReference reference);
+
+        IList GetReferences();
+    }
+}
diff --git a/src/Sdk/DTPipelines/Pipelines/IGraphNode.cs b/src/Sdk/DTPipelines/Pipelines/IGraphNode.cs
new file mode 100644
index 00000000000..178041f868d
--- /dev/null
+++ b/src/Sdk/DTPipelines/Pipelines/IGraphNode.cs
@@ -0,0 +1,51 @@
+using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using GitHub.DistributedTask.Pipelines.Validation;
+using GitHub.DistributedTask.WebApi;
+
+namespace GitHub.DistributedTask.Pipelines
+{
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public interface IGraphNode
+    {
+        String Name
+        {
+            get;
+            set;
+        }
+
+        String DisplayName
+        {
+            get;
+            set;
+        }
+
+        String Condition
+        {
+            get;
+            set;
+        }
+
+        ISet DependsOn
+        {
+            get;
+        }
+
+        void Validate(PipelineBuildContext context, ValidationResult result);
+    }
+
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public interface IGraphNodeInstance
+    {
+        Int32 Attempt { get; set; }
+        String Identifier { get; set; }
+        String Name { get; set; }
+        DateTime? StartTime { get; set; }
+        DateTime? FinishTime { get; set; }
+        TaskResult? Result { get; set; }
+        Boolean SecretsAccessed { get; }
+        IDictionary Outputs { get; }
+        void ResetSecretsAccessed();
+    }
+}
diff --git a/src/Sdk/DTPipelines/Pipelines/IJobFactory.cs b/src/Sdk/DTPipelines/Pipelines/IJobFactory.cs
new file mode 100644
index 00000000000..edfe7050afb
--- /dev/null
+++ b/src/Sdk/DTPipelines/Pipelines/IJobFactory.cs
@@ -0,0 +1,20 @@
+using System;
+using System.Collections.Generic;
+using GitHub.DistributedTask.Pipelines.Runtime;
+
+namespace GitHub.DistributedTask.Pipelines
+{
+    internal interface IJobFactory
+    {
+        String Name { get; }
+
+        Job CreateJob(
+            JobExecutionContext context,
+            ExpressionValue container,
+            IDictionary> sidecarContainers,
+            Boolean continueOnError,
+            Int32 timeoutInMinutes,
+            Int32 cancelTimeoutInMinutes,
+            String displayName = null);
+    }
+}
diff --git a/src/Sdk/DTPipelines/Pipelines/IPackageStore.cs b/src/Sdk/DTPipelines/Pipelines/IPackageStore.cs
new file mode 100644
index 00000000000..4837420b46e
--- /dev/null
+++ b/src/Sdk/DTPipelines/Pipelines/IPackageStore.cs
@@ -0,0 +1,12 @@
+using System;
+using System.ComponentModel;
+using GitHub.DistributedTask.WebApi;
+
+namespace GitHub.DistributedTask.Pipelines
+{
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public interface IPackageStore
+    {
+        PackageVersion GetLatestVersion(String packageType);
+    }
+}
diff --git a/src/Sdk/DTPipelines/Pipelines/IPhaseProvider.cs b/src/Sdk/DTPipelines/Pipelines/IPhaseProvider.cs
new file mode 100644
index 00000000000..b306d20d4f4
--- /dev/null
+++ b/src/Sdk/DTPipelines/Pipelines/IPhaseProvider.cs
@@ -0,0 +1,20 @@
+using System;
+using GitHub.DistributedTask.Pipelines.Validation;
+
+namespace GitHub.DistributedTask.Pipelines
+{
+    ///
+    /// This is a temporary extension point for a provider phase to participate in pipeline resource discovery.
+    /// This extension point can be removed after we have schema-driven resource discovery.
+    ///
+    public interface IPhaseProvider
+    {
+        String Provider { get; }
+
+        ///
+        /// Validates the pipeline with the builder context to provide additional validation errors
+        /// and pipeline resource discovery.
+ /// + ValidationResult Validate(PipelineBuildContext context, ProviderPhase phase); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IPipelineContext.cs b/src/Sdk/DTPipelines/Pipelines/IPipelineContext.cs new file mode 100644 index 00000000000..4ceb339cf49 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IPipelineContext.cs @@ -0,0 +1,59 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Logging; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.WebApi; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides the environment and services available during build and execution of a pipeline. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IPipelineContext + { + ICounterStore CounterStore { get; } + + DictionaryContextData Data { get; } + + Int32 EnvironmentVersion { get; } + + EvaluationOptions ExpressionOptions { get; } + + IPipelineIdGenerator IdGenerator { get; } + + IPackageStore PackageStore { get; } + + PipelineResources ReferencedResources { get; } + + IResourceStore ResourceStore { get; } + + IReadOnlyList StepProviders { get; } + + ISecretMasker SecretMasker { get; } + + ITaskStore TaskStore { get; } + + IPipelineTraceWriter Trace { get; } + + ISet SystemVariableNames { get; } + + IDictionary Variables { get; } + + String ExpandVariables(String value, Boolean maskSecrets = false); + + ExpressionResult Evaluate(String expression); + + ExpressionResult Evaluate(JObject value); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IPipelineTraceWriter : ITraceWriter + { + void EnterProperty(String name); + void LeaveProperty(String name); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IPipelineContextExtensions.cs b/src/Sdk/DTPipelines/Pipelines/IPipelineContextExtensions.cs new file mode 100644 index 00000000000..4affd092358 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IPipelineContextExtensions.cs @@ -0,0 +1,81 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class IPipelineContextExtensions + { + /// + /// Uses the current context to validate the steps provided. + /// + /// The current pipeline context + /// The list of steps which should be validated + /// The options controlling the level of validation performed + /// A list of validation errors which were encountered, if any + public static IList Validate( + this IPipelineContext context, + IList steps, + PhaseTarget target, + BuildOptions options) + { + var builder = new PipelineBuilder(context); + return builder.Validate(steps, target, options); + } + + /// + /// Evaluates a property which is specified as an expression and writes the resulting value to the + /// corresponding trace log if one is specified on the context. 
+ /// + /// The result type of the expression + /// The pipeline context + /// The name of the property being evaluated + /// The expression which should be evaluated + /// The default value if no expression is specified + /// True to write the default value if no expression is specified; otherwise, false + /// The result of the expression evaluation + internal static ExpressionResult Evaluate( + this IPipelineContext context, + String name, + ExpressionValue expression, + T defaultValue, + Boolean traceDefault = true) + { + ExpressionResult result = null; + if (expression != null) + { + if (expression.IsLiteral) + { + context.Trace?.Info($"{name}: {GetTraceValue(expression.Literal)}"); + result = new ExpressionResult(expression.Literal); + } + else + { + context.Trace?.EnterProperty(name); + result = expression.GetValue(context); + context.Trace?.LeaveProperty(name); + } + } + else if (traceDefault && context.Trace != null) + { + context.Trace.Info($"{name}: {defaultValue}"); + } + + return result ?? new ExpressionResult(defaultValue); + } + + private static String GetTraceValue(T value) + { + if (value.GetType().IsValueType) + { + return value.ToString(); + } + else + { + return $"{System.Environment.NewLine}{JsonUtility.ToString(value, indent: true)}"; + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IPipelineIdGenerator.cs b/src/Sdk/DTPipelines/Pipelines/IPipelineIdGenerator.cs new file mode 100644 index 00000000000..bfb5ca8d4c8 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IPipelineIdGenerator.cs @@ -0,0 +1,35 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IPipelineIdGenerator + { + Guid GetInstanceId(params String[] segments); + + String GetInstanceName(params String[] segments); + + String GetStageIdentifier(String stageName); + + Guid GetStageInstanceId(String stageName, Int32 attempt); + + String GetStageInstanceName(String stageName, Int32 attempt); + + String GetPhaseIdentifier(String stageName, String phaseName); + + Guid GetPhaseInstanceId(String stageName, String phaseName, Int32 attempt); + + String GetPhaseInstanceName(String stageName, String phaseName, Int32 attempt); + + String GetJobIdentifier(String stageName, String phaseName, String jobName); + + Guid GetJobInstanceId(String stageName, String phaseName, String jobName, Int32 attempt); + + String GetJobInstanceName(String stageName, String phaseName, String jobName, Int32 attempt); + + Guid GetTaskInstanceId(String stageName, String phaseName, String jobName, Int32 jobAttempt, String name3); + + String GetTaskInstanceName(String stageName, String phaseName, String jobName, Int32 jobAttempt, String name); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IResourceStore.cs b/src/Sdk/DTPipelines/Pipelines/IResourceStore.cs new file mode 100644 index 00000000000..b85ce4933c9 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IResourceStore.cs @@ -0,0 +1,87 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Pipelines.Artifacts; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + public interface IBuildStore : IStepProvider + { + void Add(BuildResource resource); + void Add(IEnumerable resources); + BuildResource Get(String alias); + IEnumerable GetAll(); + IArtifactResolver Resolver { get; } + } + + public interface IContainerStore + { + void Add(ContainerResource resource); + void Add(IEnumerable 
resources); + ContainerResource Get(String alias); + IEnumerable GetAll(); + } + + public interface IPipelineStore : IStepProvider + { + void Add(PipelineResource resource); + void Add(IEnumerable resources); + PipelineResource Get(String alias); + IEnumerable GetAll(); + } + + public interface IRepositoryStore : IStepProvider + { + void Add(RepositoryResource resource); + void Add(IEnumerable resources); + RepositoryResource Get(String alias); + IEnumerable GetAll(); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IResourceStore : IStepProvider + { + IBuildStore Builds { get; } + + IContainerStore Containers { get; } + + IServiceEndpointStore Endpoints { get; } + + ISecureFileStore Files { get; } + + IEnvironmentStore Environments { get; } + + IPipelineStore Pipelines { get; } + + IAgentQueueStore Queues { get; } + + IAgentPoolStore Pools { get; } + + IRepositoryStore Repositories { get; } + + IVariableGroupStore VariableGroups { get; } + + PipelineResources GetAuthorizedResources(); + + ServiceEndpoint GetEndpoint(Guid endpointId); + + ServiceEndpoint GetEndpoint(String endpointId); + + SecureFile GetFile(Guid fileId); + + SecureFile GetFile(String fileId); + + TaskAgentQueue GetQueue(Int32 queueId); + + TaskAgentQueue GetQueue(String queueId); + + TaskAgentPool GetPool(Int32 poolId); + + TaskAgentPool GetPool(String poolName); + + VariableGroup GetVariableGroup(Int32 groupId); + + VariableGroup GetVariableGroup(String groupId); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IResourceStoreExtensions.cs b/src/Sdk/DTPipelines/Pipelines/IResourceStoreExtensions.cs new file mode 100644 index 00000000000..181edeb2acc --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IResourceStoreExtensions.cs @@ -0,0 +1,198 @@ +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class IResourceStoreExtensions + { + /// + /// Extracts the full resources from the which are referenced in the + /// collection. + /// + /// The store which contains the resources + /// The resources which should be included with the job + /// A new JobResources instance with the filtered set of resources from the store + public static JobResources GetJobResources( + this IResourceStore store, + PipelineResources resources) + { + var jobResources = new JobResources(); + jobResources.Containers.AddRange(resources.Containers.Select(x => x.Clone())); + + foreach (var endpointRef in resources.Endpoints) + { + var endpoint = store.Endpoints.Get(endpointRef); + if (endpoint != null) + { + jobResources.Endpoints.Add(endpoint); + } + } + + foreach (var fileRef in resources.Files) + { + var file = store.Files.Get(fileRef); + if (file != null) + { + jobResources.SecureFiles.Add(file); + } + } + + foreach (var repository in resources.Repositories) + { + jobResources.Repositories.Add(store.Repositories.Get(repository.Alias)); + } + + return jobResources; + } + + /// + /// Retrieves a service endpoint from the store using the provided reference. 
+ /// + /// The resource store which should be queried + /// The service endpoint reference which should be resolved + /// A ServiceEndpoint instance matching the specified reference if found; otherwise, null + public static ServiceEndpoint GetEndpoint( + this IResourceStore store, + ServiceEndpointReference reference) + { + return store.Endpoints.Get(reference); + } + + /// + /// Retrieves a secure file from the store using the provided reference. + /// + /// The resource store which should be queried + /// The secure file reference which should be resolved + /// A SecureFile instance matching the specified reference if found; otherwise, null + public static SecureFile GetFile( + this IResourceStore store, + SecureFileReference reference) + { + return store.Files.Get(reference); + } + + /// + /// Retrieves an agent queue from the store using the provided reference. + /// + /// The resource store which should be queried + /// The agent queue reference which should be resolved + /// A TaskAgentQueue instance matching the specified reference if found; otherwise, null + public static TaskAgentQueue GetQueue( + this IResourceStore store, + AgentQueueReference reference) + { + return store.Queues.Get(reference); + } + + /// + /// Retrieves an agent pool from the store using the provided reference. + /// + /// The resource store which should be queried + /// The agent pool reference which should be resolved + /// A TaskAgentPool instance matching the specified reference if found; otherwise, null + public static TaskAgentPool GetPool( + this IResourceStore store, + AgentPoolReference reference) + { + return store.Pools.Get(reference); + } + + /// + /// Retrieves a variable group from the store using the provided reference. + /// + /// The resource store which should be queried + /// The variable group reference which should be resolved + /// A VariableGroup instance matching the specified reference if found; otherwise, null + public static VariableGroup GetVariableGroup( + this IResourceStore store, + VariableGroupReference reference) + { + return store.VariableGroups.Get(reference); + } + + /// + /// Given a partially formed reference, returns the associated reference stored with the plan. 
+ /// + public static ResourceReference GetSnappedReference( + this IResourceStore store, + ResourceReference r) + { + if (r is VariableGroupReference vgr) + { + var m = store.VariableGroups.Get(vgr); + if (m != null) + { + return new VariableGroupReference + { + Id = m.Id, + Name = m.Name + }; + } + } + else if (r is AgentQueueReference aqr) + { + var m = store.Queues.Get(aqr); + if (m != null) + { + return new AgentQueueReference + { + Id = m.Id, + Name = m.Name + }; + } + } + else if (r is AgentPoolReference apr) + { + var m = store.Pools.Get(apr); + if (m != null) + { + return new AgentPoolReference + { + Id = m.Id, + Name = m.Name + }; + } + } + else if (r is ServiceEndpointReference ser) + { + var m = store.Endpoints.Get(ser); + if (m != null) + { + return new ServiceEndpointReference + { + Id = m.Id, + Name = m.Name + }; + } + } + else if (r is SecureFileReference sfr) + { + var m = store.Files.Get(sfr); + if (m != null) + { + return new SecureFileReference + { + Id = m.Id, + Name = m.Name + }; + } + } + else if (r is EnvironmentReference er) + { + var m = store.Environments.Get(er); + if (m != null) + { + return new EnvironmentReference + { + Id = m.Id, + Name = m.Name + }; + } + } + + return r; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ISecureFileResolver.cs b/src/Sdk/DTPipelines/Pipelines/ISecureFileResolver.cs new file mode 100644 index 00000000000..be08e791efe --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ISecureFileResolver.cs @@ -0,0 +1,37 @@ +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechanism of resolving an SecureFileReference to a SecureFile. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public interface ISecureFileResolver + { + /// + /// Attempts to resolve secure file references to a SecureFile instances. + /// + /// The file references which should be resolved + /// The resolved secure files + IList Resolve(ICollection references); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static class ISecureFileResolverExtensions + { + /// + /// Attempts to resolve the secure file reference to a SecureFile. + /// + /// The file reference which should be resolved + /// The secure file if resolved; otherwise, null + public static SecureFile Resolve( + this ISecureFileResolver resolver, + SecureFileReference reference) + { + return resolver.Resolve(new[] { reference }).FirstOrDefault(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ISecureFileStore.cs b/src/Sdk/DTPipelines/Pipelines/ISecureFileStore.cs new file mode 100644 index 00000000000..ea695805da1 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ISecureFileStore.cs @@ -0,0 +1,19 @@ +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface ISecureFileStore + { + IList GetAuthorizedReferences(); + + SecureFile Get(SecureFileReference reference); + + /// + /// Gets the ISecureFileResolver used by this store, if any. 
+ /// + ISecureFileResolver Resolver { get; } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IServiceEndpointResolver.cs b/src/Sdk/DTPipelines/Pipelines/IServiceEndpointResolver.cs new file mode 100644 index 00000000000..c8646f51b48 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IServiceEndpointResolver.cs @@ -0,0 +1,46 @@ +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechanism of resolving an ServiceEndpointReference to a ServiceEndpoint. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IServiceEndpointResolver + { + /// + /// Adds the endpoint reference as authorized to ensure future retrievals of the endpoint + /// are allowed regardless of security context. + /// + /// The endpoint reference which should be considered authorized + void Authorize(ServiceEndpointReference reference); + + /// + /// Attempts to resolve endpoint references to ServiceEndpoint instances. + /// + /// The endpoint references which should be resolved + /// The resolved service endpoints + IList Resolve(ICollection references); + + IList GetAuthorizedReferences(); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static class IServiceEndpointResolverExtensions + { + /// + /// Attempts to resolve the endpoint reference to a ServiceEndpoint. + /// + /// The endpoint reference which should be resolved + /// The service endpoint if resolved; otherwise, null + public static ServiceEndpoint Resolve( + this IServiceEndpointResolver resolver, + ServiceEndpointReference reference) + { + return resolver.Resolve(new[] { reference }).FirstOrDefault(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IServiceEndpointStore.cs b/src/Sdk/DTPipelines/Pipelines/IServiceEndpointStore.cs new file mode 100644 index 00000000000..b72d7d95be2 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IServiceEndpointStore.cs @@ -0,0 +1,39 @@ +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides access to service endpoints which are referenced within a pipeline. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IServiceEndpointStore + { + /// + /// Retrieves the list of all endpoints authorized for use in this store. + /// + /// The list of ServiceEndpointReference objects authorized for use + IList GetAuthorizedReferences(); + + /// + /// Adds an endpoint reference which should be considered authorized. Future + /// calls to retrieve this resource will be treated as pre-authorized regardless + /// of authorization context used. + /// + /// The endpoint which should be authorized + void Authorize(ServiceEndpointReference endpoint); + + /// + /// Attempts to authorize an endpoint for use. + /// + /// The endpoint reference to be resolved + /// The endpoint if found and authorized; otherwise, null + ServiceEndpoint Get(ServiceEndpointReference endpoint); + + /// + /// Gets the IServiceEndpointResolver used by this store, if any. 
+        ///
+        IServiceEndpointResolver Resolver { get; }
+    }
+}
diff --git a/src/Sdk/DTPipelines/Pipelines/IStepProvider.cs b/src/Sdk/DTPipelines/Pipelines/IStepProvider.cs
new file mode 100644
index 00000000000..4358edd85ac
--- /dev/null
+++ b/src/Sdk/DTPipelines/Pipelines/IStepProvider.cs
@@ -0,0 +1,21 @@
+using System;
+using System.Collections.Generic;
+
+namespace GitHub.DistributedTask.Pipelines
+{
+    public interface IStepProvider
+    {
+        IList GetPreSteps(IPipelineContext context, IReadOnlyList steps);
+        Dictionary> GetPostTaskSteps(IPipelineContext context, IReadOnlyList steps);
+        IList GetPostSteps(IPipelineContext context, IReadOnlyList steps);
+
+        ///
+        /// Given a JobStep (e.g., a download step), translates it into the corresponding task steps
+        ///
+        ///
+        /// Input step to be resolved
+        /// Resolved output steps
+        /// true if the step is resolved, false otherwise. Passing a PowerShell step to ResolveStep would return false
+        Boolean ResolveStep(IPipelineContext context, JobStep step, out IList resolvedSteps);
+    }
+}
diff --git a/src/Sdk/DTPipelines/Pipelines/ITaskResolver.cs b/src/Sdk/DTPipelines/Pipelines/ITaskResolver.cs
new file mode 100644
index 00000000000..18bba5c6723
--- /dev/null
+++ b/src/Sdk/DTPipelines/Pipelines/ITaskResolver.cs
@@ -0,0 +1,12 @@
+using System;
+using System.ComponentModel;
+using GitHub.DistributedTask.WebApi;
+
+namespace GitHub.DistributedTask.Pipelines
+{
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public interface ITaskResolver
+    {
+        TaskDefinition Resolve(Guid taskId, String versionSpec);
+    }
+}
diff --git a/src/Sdk/DTPipelines/Pipelines/ITaskStore.cs b/src/Sdk/DTPipelines/Pipelines/ITaskStore.cs
new file mode 100644
index 00000000000..2afc0673ca6
--- /dev/null
+++ b/src/Sdk/DTPipelines/Pipelines/ITaskStore.cs
@@ -0,0 +1,30 @@
+using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using GitHub.DistributedTask.WebApi;
+
+namespace GitHub.DistributedTask.Pipelines
+{
+    ///
+    /// Provides a contract for resolving tasks from a given store.
+    ///
+    [EditorBrowsable(EditorBrowsableState.Never)]
+    public interface ITaskStore
+    {
+        ///
+        /// Resolves a task from the store using the unique identifier and version.
+        ///
+        /// The unique identifier of the task
+        /// The version of the task which is desired
+        /// The closest matching task definition if found; otherwise, null
+        TaskDefinition ResolveTask(Guid taskId, String version);
+
+        ///
+        /// Resolves a task from the store using the specified name and version.
+ /// + /// The name of the task + /// The version of the task which is desired + /// The closest matching task definition if found; otherwise, null + TaskDefinition ResolveTask(String name, String version); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ITaskTemplateResolver.cs b/src/Sdk/DTPipelines/Pipelines/ITaskTemplateResolver.cs new file mode 100644 index 00000000000..e9d8ce669f4 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ITaskTemplateResolver.cs @@ -0,0 +1,14 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface ITaskTemplateResolver + { + Boolean CanResolve(TaskTemplateReference template); + + IList ResolveTasks(TaskTemplateStep template); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ITaskTemplateStore.cs b/src/Sdk/DTPipelines/Pipelines/ITaskTemplateStore.cs new file mode 100644 index 00000000000..75b1a994b47 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ITaskTemplateStore.cs @@ -0,0 +1,16 @@ +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechanism for task templates to be resolved at build time. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public interface ITaskTemplateStore + { + void AddProvider(ITaskTemplateResolver provider); + + IEnumerable ResolveTasks(TaskTemplateStep step); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IVariable.cs b/src/Sdk/DTPipelines/Pipelines/IVariable.cs new file mode 100644 index 00000000000..afa27899021 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IVariable.cs @@ -0,0 +1,88 @@ +using System; +using System.ComponentModel; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + public enum VariableType + { + Inline = 0, + Group = 1, + } + + [JsonConverter(typeof(VariableJsonConverter))] + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IVariable + { + VariableType Type { get; } + } + + internal class VariableJsonConverter : VssSecureJsonConverter + { + public VariableJsonConverter() + { + } + + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Boolean CanConvert(Type objectType) + { + return typeof(IVariable).IsAssignableFrom(objectType); + } + + public override Object ReadJson(JsonReader reader, Type objectType, Object existingValue, JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + var resultObj = JObject.Load(reader); + var variableType = VariableType.Inline; + if (resultObj.TryGetValue("type", StringComparison.OrdinalIgnoreCase, out var rawValue)) + { + if (rawValue.Type == JTokenType.Integer) + { + variableType = (VariableType)(Int32)rawValue; + } + if (rawValue.Type == JTokenType.String) + { + variableType = (VariableType)Enum.Parse(typeof(VariableType), (String)rawValue, true); + } + } + else if (resultObj.TryGetValue("id", StringComparison.OrdinalIgnoreCase, out _) || + resultObj.TryGetValue("groupType", StringComparison.OrdinalIgnoreCase, out _) || + resultObj.TryGetValue("secretStore", StringComparison.OrdinalIgnoreCase, out _)) + { + variableType = VariableType.Group; + } + + IVariable result = null; + switch (variableType) + { + case VariableType.Group: + result = new VariableGroupReference(); + break; + + default: + result = new Variable(); + break; + } + + using (var 
objectReader = resultObj.CreateReader()) + { + serializer.Populate(objectReader, result); + } + + return result; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IVariableGroupResolver.cs b/src/Sdk/DTPipelines/Pipelines/IVariableGroupResolver.cs new file mode 100644 index 00000000000..3d41ada2de3 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IVariableGroupResolver.cs @@ -0,0 +1,32 @@ +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechanism of resolving a VariableGroupReference to a VariableGroup. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IVariableGroupResolver + { + /// + /// Attempts to resolve variable group references to VariableGroup instances. + /// + /// The variable groups which should be resolved + /// The resolved variable groups + IList Resolve(ICollection references); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static class IVariableGroupResolverExtensions + { + public static VariableGroup Resolve( + this IVariableGroupResolver resolver, + VariableGroupReference reference) + { + return resolver.Resolve(new[] { reference }).FirstOrDefault(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IVariableGroupStore.cs b/src/Sdk/DTPipelines/Pipelines/IVariableGroupStore.cs new file mode 100644 index 00000000000..c2f5272b9a1 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IVariableGroupStore.cs @@ -0,0 +1,21 @@ +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IVariableGroupStore : IStepProvider + { + IList GetAuthorizedReferences(); + + VariableGroup Get(VariableGroupReference queue); + + IVariableValueProvider GetValueProvider(VariableGroupReference queue); + + /// + /// Gets the IVariableGroupResolver used by this store, if any.
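// Illustrative sketch only: how the single-reference Resolve extension above can be used.
// The helper name TryResolve and the assumption that a resolver and a populated
// VariableGroupReference are already available are inventions for this example.
using GitHub.DistributedTask.WebApi;

namespace GitHub.DistributedTask.Pipelines
{
    internal static class VariableGroupResolverSample
    {
        // Resolve(reference) is shorthand for Resolve(new[] { reference }).FirstOrDefault(),
        // so a null return simply means the reference could not be resolved.
        internal static VariableGroup TryResolve(IVariableGroupResolver resolver, VariableGroupReference reference)
        {
            return resolver.Resolve(reference);
        }
    }
}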
+ /// + IVariableGroupResolver Resolver { get; } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/IVariableValueProvider.cs b/src/Sdk/DTPipelines/Pipelines/IVariableValueProvider.cs new file mode 100644 index 00000000000..8cc31596383 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/IVariableValueProvider.cs @@ -0,0 +1,22 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IVariableValueProvider + { + String GroupType + { + get; + } + + Boolean ShouldGetValues(IPipelineContext context); + + IList GetSteps(IPipelineContext context, VariableGroupReference group, IEnumerable keys); + + IDictionary GetValues(VariableGroup group, ServiceEndpoint endpoint, IEnumerable keys, Boolean includeSecrets); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Job.cs b/src/Sdk/DTPipelines/Pipelines/Job.cs new file mode 100644 index 00000000000..aa389b1b89b --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Job.cs @@ -0,0 +1,291 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class Job + { + [JsonConstructor] + public Job() + { + } + + private Job(Job jobToCopy) + { + this.Id = jobToCopy.Id; + this.Name = jobToCopy.Name; + this.DisplayName = jobToCopy.DisplayName; + this.Container = jobToCopy.Container?.Clone(); + this.ServiceContainers = jobToCopy.ServiceContainers?.Clone(); + this.ContinueOnError = jobToCopy.ContinueOnError; + this.TimeoutInMinutes = jobToCopy.TimeoutInMinutes; + this.CancelTimeoutInMinutes = jobToCopy.CancelTimeoutInMinutes; + this.Workspace = jobToCopy.Workspace?.Clone(); + this.Target = jobToCopy.Target?.Clone(); + this.EnvironmentVariables = jobToCopy.EnvironmentVariables?.Clone(); + + if (jobToCopy.m_demands != null && jobToCopy.m_demands.Count > 0) + { + m_demands = new List(jobToCopy.m_demands.Select(x => x.Clone())); + } + + if (jobToCopy.m_steps != null && jobToCopy.m_steps.Count > 0) + { + m_steps = new List(jobToCopy.m_steps.Select(x => x.Clone() as JobStep)); + } + + if (jobToCopy.m_variables != null && jobToCopy.m_variables.Count > 0) + { + m_variables = new List(jobToCopy.m_variables); + } + + if (jobToCopy.m_sidecarContainers != null && jobToCopy.m_sidecarContainers.Count > 0) + { + m_sidecarContainers = new Dictionary(jobToCopy.m_sidecarContainers, StringComparer.OrdinalIgnoreCase); + } + } + + [DataMember] + public Guid Id + { + get; + set; + } + + [DataMember] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String DisplayName + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken Container + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken ServiceContainers + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Boolean ContinueOnError + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken EnvironmentVariables + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public 
Int32 TimeoutInMinutes + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Int32 CancelTimeoutInMinutes + { + get; + set; + } + + public IList Demands + { + get + { + if (m_demands == null) + { + m_demands = new List(); + } + return m_demands; + } + } + + [DataMember(EmitDefaultValue = false)] + public IdentityRef ExecuteAs + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public WorkspaceOptions Workspace + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public PhaseTarget Target + { + get; + set; + } + + public IList Steps + { + get + { + if (m_steps == null) + { + m_steps = new List(); + } + return m_steps; + } + } + + public IList Scopes + { + get + { + if (m_scopes == null) + { + m_scopes = new List(); + } + return m_scopes; + } + } + + public IDictionary SidecarContainers + { + get + { + if (m_sidecarContainers == null) + { + m_sidecarContainers = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_sidecarContainers; + } + } + + public IList Variables + { + get + { + if (m_variables == null) + { + m_variables = new List(); + } + return m_variables; + } + } + + public Job Clone() + { + return new Job(this); + } + + /// + /// Creates an instance of a task using the specified execution context. + /// + /// The job execution context + /// The name of the task in the steps list + /// + public CreateTaskResult CreateTask( + JobExecutionContext context, + String taskName) + { + ArgumentUtility.CheckStringForNullOrEmpty(taskName, nameof(taskName)); + + TaskDefinition definition = null; + var task = this.Steps.SingleOrDefault(x => taskName.Equals(x.Name, StringComparison.OrdinalIgnoreCase))?.Clone() as TaskStep; + if (task != null) + { + definition = context.TaskStore.ResolveTask(task.Reference.Id, task.Reference.Version); + foreach (var input in definition.Inputs.Where(x => x != null)) + { + var key = input.Name?.Trim() ?? String.Empty; + if (!String.IsNullOrEmpty(key)) + { + if (!task.Inputs.ContainsKey(key)) + { + task.Inputs[key] = input.DefaultValue?.Trim() ?? String.Empty; + } + } + } + + // Now expand any macros which appear in inputs + foreach (var input in task.Inputs.ToArray()) + { + task.Inputs[input.Key] = context.ExpandVariables(input.Value); + } + + // Set the system variables populated while running an individual task + context.Variables[WellKnownDistributedTaskVariables.TaskInstanceId] = task.Id.ToString("D"); + context.Variables[WellKnownDistributedTaskVariables.TaskDisplayName] = task.DisplayName ?? 
task.Name; + context.Variables[WellKnownDistributedTaskVariables.TaskInstanceName] = task.Name; + } + + return new CreateTaskResult(task, definition); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_demands?.Count == 0) + { + m_demands = null; + } + + if (m_steps?.Count == 0) + { + m_steps = null; + } + + if (m_scopes?.Count == 0) + { + m_scopes = null; + } + + if (m_variables?.Count == 0) + { + m_variables = null; + } + } + + [DataMember(Name = "Demands", EmitDefaultValue = false)] + private List m_demands; + + [DataMember(Name = "Steps", EmitDefaultValue = false)] + private List m_steps; + + [DataMember(Name = "Scopes", EmitDefaultValue = false)] + private List m_scopes; + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private List m_variables; + + [DataMember(Name = "SidecarContainers", EmitDefaultValue = false)] + private IDictionary m_sidecarContainers; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/JobContainer.cs b/src/Sdk/DTPipelines/Pipelines/JobContainer.cs new file mode 100644 index 00000000000..ad952cf5320 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/JobContainer.cs @@ -0,0 +1,60 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class JobContainer + { + /// + /// Generated unique alias + /// + public String Alias { get; } = Guid.NewGuid().ToString("N"); + + /// + /// Gets or sets the environment which is provided to the container. + /// + public IDictionary Environment + { + get; + set; + } + + /// + /// Gets or sets the container image name. + /// + public String Image + { + get; + set; + } + + /// + /// Gets or sets the options used for the container instance. + /// + public String Options + { + get; + set; + } + + /// + /// Gets or sets the volumes which are mounted into the container. + /// + public IList Volumes + { + get; + set; + } + + /// + /// Gets or sets the ports which are exposed on the container. + /// + public IList Ports + { + get; + set; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/JobExpansionOptions.cs b/src/Sdk/DTPipelines/Pipelines/JobExpansionOptions.cs new file mode 100644 index 00000000000..5277822c4c6 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/JobExpansionOptions.cs @@ -0,0 +1,106 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.Pipelines +{ + public class JobExpansionOptions + { + public JobExpansionOptions(ICollection configurations) + { + AddConfigurations(configurations); + } + + internal JobExpansionOptions(IDictionary configurations) + { + UpdateConfigurations(configurations); + } + + internal JobExpansionOptions( + String configuration, + Int32 attemptNumber = NoSpecifiedAttemptNumber) + { + if (!String.IsNullOrEmpty(configuration)) + { + this.Configurations.Add(configuration, attemptNumber); + } + } + + /// + /// Specifies a filter for the expansion of specific Phase configurations. + /// The key is the configuration name, the value is the explicitly requested + /// attempt number. + /// If mapping is null, there is no filter and all configurations will be + /// produced. 
+ /// + internal IDictionary Configurations + { + get + { + if (m_configurations == null) + { + m_configurations = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_configurations; + } + } + + public Boolean IsIncluded(String configuration) + { + return m_configurations == null || m_configurations.ContainsKey(configuration); + } + + /// + /// Add new configurations, with no specified custom attempt number + /// + public void AddConfigurations(ICollection configurations) + { + if (configurations == null) + { + return; + } + + var localConfigs = this.Configurations; + foreach (var c in configurations) + { + if (!localConfigs.ContainsKey(c)) + { + localConfigs[c] = NoSpecifiedAttemptNumber; + } + } + } + + /// + /// add (or replace) any configurations and their associated attempt numbers with new provided values. + /// + public void UpdateConfigurations(IDictionary configurations) + { + if (configurations == null) + { + return; + } + + var localConfigs = this.Configurations; + foreach (var pair in configurations) + { + localConfigs[pair.Key] = pair.Value; + } + } + + /// + /// returns custom attempt number or JobExpansionOptions.NoSpecifiedAttemptNumber if none specified. + /// + /// configuration or "job name" + public Int32 GetAttemptNumber(String configuration) + { + if (m_configurations != null && m_configurations.TryGetValue(configuration, out Int32 number)) + { + return number; + } + + return NoSpecifiedAttemptNumber; + } + + public const Int32 NoSpecifiedAttemptNumber = -1; + private Dictionary m_configurations; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/JobFactory.cs b/src/Sdk/DTPipelines/Pipelines/JobFactory.cs new file mode 100644 index 00000000000..798547fbf2a --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/JobFactory.cs @@ -0,0 +1,480 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.Pipelines.ObjectTemplating; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.Pipelines.Validation; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class JobFactory : PhaseNode + { + public JobFactory() + { + } + + private JobFactory(JobFactory copy) + : base(copy) + { + if (copy.m_steps != null && copy.m_steps.Count > 0) + { + m_steps = new List(copy.m_steps.Select(x => x.Clone())); + } + } + + /// + /// Gets the phase type. + /// + [DataMember(EmitDefaultValue = false)] + public override PhaseType Type => PhaseType.JobFactory; + + public IList Scopes + { + get + { + if (m_scopes == null) + { + m_scopes = new List(); + } + return m_scopes; + } + } + + /// + /// Gets the list of steps associated with this phase. At runtime the steps will be used as a template for + /// the execution of a job. 
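// Illustrative sketch only: exercising the public surface of JobExpansionOptions
// (defined in JobExpansionOptions.cs above). The configuration names "job_1", "job_2",
// and "job_3" are made up for this example.
using System;

namespace GitHub.DistributedTask.Pipelines
{
    internal static class JobExpansionOptionsSample
    {
        internal static void Demonstrate()
        {
            // Restrict expansion to two named configurations, with no explicit attempt numbers.
            var options = new JobExpansionOptions(new[] { "job_1", "job_2" });

            Console.WriteLine(options.IsIncluded("job_1")); // True
            Console.WriteLine(options.IsIncluded("job_3")); // False (filtered out)

            // No attempt was requested, so the sentinel value is returned.
            Console.WriteLine(
                options.GetAttemptNumber("job_1") == JobExpansionOptions.NoSpecifiedAttemptNumber); // True
        }
    }
}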
+ /// + public IList Steps + { + get + { + if (m_steps == null) + { + m_steps = new List(); + } + return m_steps; + } + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken Strategy + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public ScalarToken JobDisplayName + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken JobTarget + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public ScalarToken JobTimeout + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public ScalarToken JobCancelTimeout + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken JobContainer + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken JobServiceContainers + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken EnvironmentVariables + { + get; + set; + } + + public void CheckExpandReferences( + out bool isEventReferenced, + out bool isOutputsReferenced) + { + isEventReferenced = false; + isOutputsReferenced = false; + var expressionTokens = Strategy.Traverse() + .Concat(JobDisplayName.Traverse()) + .Concat(JobTarget.Traverse()) + .Concat(JobTimeout.Traverse()) + .Concat(JobCancelTimeout.Traverse()) + .OfType() + .ToArray(); + var parser = new ExpressionParser(); + foreach (var expressionToken in expressionTokens) + { + var tree = parser.ValidateSyntax(expressionToken.Expression, null); + var isReferenced = tree.CheckReferencesContext( + PipelineTemplateConstants.EventPattern, + PipelineTemplateConstants.OutputsPattern); + if (!isEventReferenced) + { + isEventReferenced = isReferenced[0]; + } + + if (!isOutputsReferenced) + { + isOutputsReferenced = isReferenced[1]; + } + } + } + + public ExpandPhaseResult Expand( + PhaseExecutionContext context, + JobExpansionOptions options = null) + { + var result = new ExpandPhaseResult(); + + var trace = new JobFactoryTrace(context.Trace); + var schema = new PipelineTemplateSchemaFactory().CreateSchema(); + var templateEvaluator = new PipelineTemplateEvaluator(trace, schema); + + trace.Info("Evaluating strategy"); + var displayName = JobDisplayName is ExpressionToken ? null : DisplayName; + var strategy = templateEvaluator.EvaluateStrategy(Strategy, context.Data, displayName); + + foreach (var jobContext in ExpandContexts(context, options, strategy, trace, templateEvaluator)) + { + result.Jobs.Add(jobContext.Job); + } + + if (strategy.MaxParallel > 0) + { + result.MaxConcurrency = strategy.MaxParallel; + } + else + { + result.MaxConcurrency = result.Jobs.Count; + } + + result.FailFast = strategy.FailFast; + + return result; + } + + public IEnumerable ExpandContexts( + PhaseExecutionContext context, + JobExpansionOptions options = null, + StrategyResult strategy = null, + DistributedTask.ObjectTemplating.ITraceWriter trace = null, + PipelineTemplateEvaluator templateEvaluator = null) + { + if (trace == null) + { + trace = new JobFactoryTrace(context.Trace); + } + + if (templateEvaluator == null) + { + var schema = new PipelineTemplateSchemaFactory().CreateSchema(); + templateEvaluator = new PipelineTemplateEvaluator(trace, schema); + } + + // Strategy + if (strategy == null) + { + trace.Info("Evaluating strategy"); + var displayName = JobDisplayName is ExpressionToken ? null : DisplayName; + strategy = templateEvaluator.EvaluateStrategy(Strategy, context.Data, displayName); + } + + // Check max jobs + var maxJobs = context.ExecutionOptions.MaxJobExpansion ?? 
100; + if (strategy.Configurations.Count > maxJobs) + { + throw new MaxJobExpansionException($"Strategy produced more than {maxJobs}"); + } + + // Create jobs + for (var i = 0; i < strategy.Configurations.Count; i++) + { + var configuration = strategy.Configurations[i]; + var jobName = configuration.Name; + var attempt = 1; + if (options?.Configurations.Count > 0) + { + if (!options.Configurations.TryGetValue(jobName, out attempt)) + { + continue; + } + } + + yield return CreateJob(trace, context, templateEvaluator, jobName, configuration.DisplayName, attempt, i + 1, strategy.Configurations.Count, configuration.ContextData); + } + } + + /// + /// Resolves external references and ensures the steps are compatible with the selected target. + /// + /// The validation context + public override void Validate( + PipelineBuildContext context, + ValidationResult result) + { + base.Validate(context, result); + + var phaseStepValidationResult = new Phase.StepValidationResult(); + + // Require the latest agent version. + if (context.BuildOptions.DemandLatestAgent) + { + var latestPackageVersion = context.PackageStore?.GetLatestVersion(WellKnownPackageTypes.Agent); + if (latestPackageVersion == null) + { + throw new NotSupportedException("Unable to determine the latest agent package version"); + } + + phaseStepValidationResult.MinAgentVersion = latestPackageVersion.ToString(); + } + + Phase.ValidateSteps(context, this, new AgentQueueTarget(), result, Steps, phaseStepValidationResult); + + // Resolve the target to ensure we have stable identifiers for the orchestration engine + // phase targets with expressions need to be evaluated against resolved job contexts. + bool validateTarget = false; + if (this.Target.Type == PhaseTargetType.Pool || this.Target.Type == PhaseTargetType.Server) + { + validateTarget = true; + } + else if (this.Target is AgentQueueTarget agentQueueTarget && agentQueueTarget.IsLiteral()) + { + validateTarget = true; + } + + if (validateTarget) + { + this.Target.Validate( + context, + context.BuildOptions, + result, + this.Steps, + phaseStepValidationResult.TaskDemands); + } + } + + private JobExecutionContext CreateJob( + DistributedTask.ObjectTemplating.ITraceWriter trace, + PhaseExecutionContext phaseContext, + PipelineTemplateEvaluator templateEvaluator, + String jobName, + String configurationDisplayName, + Int32 attempt, + Int32 positionInPhase, + Int32 totalJobsInPhase, + IDictionary contextData) + { + trace.Info($"Creating job '{jobName}'"); + var jobContext = new JobExecutionContext( + context: phaseContext, + job: new JobInstance(jobName, attempt), + variables: null, + positionInPhase: positionInPhase, + totalJobsInPhase: totalJobsInPhase, + data: contextData); + var job = new Job + { + Id = jobContext.GetInstanceId(), + Name = jobContext.Job.Name, + EnvironmentVariables = EnvironmentVariables, + Container = JobContainer, + ServiceContainers = JobServiceContainers, + }; + + if (JobDisplayName is ExpressionToken) + { + trace.Info("Evaluating display name"); + job.DisplayName = templateEvaluator.EvaluateJobDisplayName(JobDisplayName, jobContext.Data, DisplayName); + } + else if (!String.IsNullOrEmpty(configurationDisplayName)) + { + job.DisplayName = configurationDisplayName; + } + else + { + job.DisplayName = DisplayName; + } + + trace.Info("Evaluating timeout"); + job.TimeoutInMinutes = templateEvaluator.EvaluateJobTimeout(JobTimeout, jobContext.Data); + trace.Info("Evaluating cancel timeout"); + job.CancelTimeoutInMinutes = 
templateEvaluator.EvaluateJobCancelTimeout(JobCancelTimeout, jobContext.Data); + trace.Info("Evaluating target"); + job.Target = templateEvaluator.EvaluateJobTarget(JobTarget, jobContext.Data); + + jobContext.Job.Definition = job; + + // Resolve the pool by name + if (job.Target is AgentPoolTarget pool && + pool.Pool?.Id == 0 && + !String.IsNullOrEmpty(pool.Pool.Name?.Literal)) + { + var resolved = jobContext.ResourceStore.GetPool(pool.Pool.Name.Literal); + if (resolved != null) + { + pool.Pool = new AgentPoolReference { Id = resolved.Id, Name = resolved.Name }; + } + } + + // Resolve the queue by name + if (job.Target is AgentQueueTarget queue && + queue.Queue?.Id == 0 && + !String.IsNullOrEmpty(queue.Queue.Name?.Literal)) + { + var resolved = jobContext.ResourceStore.GetQueue(queue.Queue.Name.Literal); + if (resolved != null) + { + queue.Queue = new AgentQueueReference { Id = resolved.Id, Name = resolved.Name }; + } + } + + // Always add self + var self = jobContext.ResourceStore?.Repositories.Get(PipelineConstants.SelfAlias); + if (self == null) + { + throw new InvalidOperationException($"Repository '{PipelineConstants.SelfAlias}' not found"); + } + + jobContext.ReferencedResources.Repositories.Add(self); + + // Add the endpoint + if (self.Endpoint != null) + { + jobContext.ReferencedResources.AddEndpointReference(self.Endpoint); + var repositoryEndpoint = jobContext.ResourceStore?.GetEndpoint(self.Endpoint); + if (repositoryEndpoint == null) + { + throw new ResourceNotFoundException(PipelineStrings.ServiceEndpointNotFound(self.Endpoint)); + } + } + + // Update the execution context with the job-specific system variables + UpdateJobContextVariablesFromJob(jobContext, job); + + var steps = new List(); + var identifier = jobContext.GetInstanceName(); + foreach (var step in Steps) + { + if (step.Type == StepType.Action) + { + job.Steps.Add(Phase.CreateJobActionStep(jobContext, identifier, step as ActionStep)); + } + else + { + throw new NotSupportedException($"Unexpected step type '{step.Type}'"); + } + } + + foreach (var scope in Scopes) + { + job.Scopes.Add(scope); + } + + return jobContext; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_scopes?.Count == 0) + { + m_scopes = null; + } + + if (m_steps?.Count == 0) + { + m_steps = null; + } + } + + private sealed class JobFactoryTrace : DistributedTask.ObjectTemplating.ITraceWriter + { + public JobFactoryTrace(DistributedTask.Expressions2.ITraceWriter trace) + { + m_trace = trace; + } + + public void Error( + String message, + params Object[] args) + { + Info("##[error]", message, args); + } + + public void Info( + String message, + params Object[] args) + { + Info(String.Empty, message, args); + } + + public void Verbose( + String message, + params Object[] args) + { + Info("##[debug]", message, args); + } + + private void Info( + String prefix, + String message, + params Object[] args) + { + if (m_trace == null) + { + return; + } + + if (args?.Length > 0) + { + m_trace.Info(String.Format(CultureInfo.InvariantCulture, $"{prefix}{message}", args)); + } + else + { + m_trace.Info($"{prefix}{message}"); + } + } + + private DistributedTask.Expressions2.ITraceWriter m_trace; + } + + [DataMember(Name = "Scopes", EmitDefaultValue = false)] + private IList m_scopes; + + [DataMember(Name = "Steps", EmitDefaultValue = false)] + private IList m_steps; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/JobResources.cs b/src/Sdk/DTPipelines/Pipelines/JobResources.cs new file mode 100644 index 
00000000000..7136b077569 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/JobResources.cs @@ -0,0 +1,108 @@ +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class JobResources + { + /// + /// Gets the collection of containers associated with the current job + /// + public List Containers + { + get + { + if (m_containers == null) + { + m_containers = new List(); + } + return m_containers; + } + } + + /// + /// Gets the collection of endpoints associated with the current job + /// + public List Endpoints + { + get + { + if (m_endpoints == null) + { + m_endpoints = new List(); + } + return m_endpoints; + } + } + + /// + /// Gets the collection of repositories associated with the current job + /// + public List Repositories + { + get + { + if (m_repositories == null) + { + m_repositories = new List(); + } + return m_repositories; + } + } + + /// + /// Gets the collection of secure files associated with the current job + /// + public List SecureFiles + { + get + { + if (m_secureFiles == null) + { + m_secureFiles = new List(); + } + return m_secureFiles; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_containers?.Count == 0) + { + m_containers = null; + } + + if (m_endpoints?.Count == 0) + { + m_endpoints = null; + } + + if (m_repositories?.Count == 0) + { + m_repositories = null; + } + + if (m_secureFiles?.Count == 0) + { + m_secureFiles = null; + } + } + + [DataMember(Name = "Containers", EmitDefaultValue = false)] + private List m_containers; + + [DataMember(Name = "Endpoints", EmitDefaultValue = false)] + private List m_endpoints; + + [DataMember(Name = "Repositories", EmitDefaultValue = false)] + private List m_repositories; + + [DataMember(Name = "SecureFiles", EmitDefaultValue = false)] + private List m_secureFiles; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/JobStep.cs b/src/Sdk/DTPipelines/Pipelines/JobStep.cs new file mode 100644 index 00000000000..1e5fa86cbab --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/JobStep.cs @@ -0,0 +1,48 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class JobStep : Step + { + [JsonConstructor] + public JobStep() + { + this.Enabled = true; + } + + protected JobStep(JobStep stepToClone) + : base(stepToClone) + { + this.Condition = stepToClone.Condition; + this.ContinueOnError = stepToClone.ContinueOnError?.Clone(); + this.TimeoutInMinutes = stepToClone.TimeoutInMinutes?.Clone(); + } + + [DataMember(EmitDefaultValue = false)] + public String Condition + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken ContinueOnError + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken TimeoutInMinutes + { + get; + set; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/GraphConditionNamedValue.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/GraphConditionNamedValue.cs new file mode 100644 index 00000000000..cccc24d1bbf --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/GraphConditionNamedValue.cs @@ -0,0 +1,24 @@ +using System; +using 
System.ComponentModel; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.DistributedTask.Pipelines.Runtime; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + /// + /// Named-value node used when evaluating graph-node conditions + /// + [EditorBrowsable(EditorBrowsableState.Never)] + internal sealed class GraphConditionNamedValue : NamedValue where TInstance : IGraphNodeInstance + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var graphContext = context.State as GraphExecutionContext; + graphContext.Data.TryGetValue(Name, out var result); + return result; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/IFileProvider.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/IFileProvider.cs new file mode 100644 index 00000000000..f086d2be870 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/IFileProvider.cs @@ -0,0 +1,13 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IFileProvider + { + String GetFileContent(String path); + + String ResolvePath(String defaultRoot, String path); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/JobDisplayNameBuilder.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/JobDisplayNameBuilder.cs new file mode 100644 index 00000000000..56b83a6151f --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/JobDisplayNameBuilder.cs @@ -0,0 +1,59 @@ +using System; +using System.Collections.Generic; +using System.Globalization; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + internal sealed class JobDisplayNameBuilder + { + public JobDisplayNameBuilder(String jobFactoryDisplayName) + { + if (!String.IsNullOrEmpty(jobFactoryDisplayName)) + { + m_jobFactoryDisplayName = jobFactoryDisplayName; + m_segments = new List(); + } + } + + public void AppendSegment(String value) + { + if (String.IsNullOrEmpty(value) || m_segments == null) + { + return; + } + + m_segments.Add(value); + } + + public String Build() + { + if (String.IsNullOrEmpty(m_jobFactoryDisplayName)) + { + return null; + } + + var displayName = default(String); + if (m_segments.Count == 0) + { + displayName = m_jobFactoryDisplayName; + } + else + { + var joinedSegments = String.Join(", ", m_segments); + displayName = String.Format(CultureInfo.InvariantCulture, "{0} ({1})", m_jobFactoryDisplayName, joinedSegments); + } + + const Int32 maxDisplayNameLength = 100; + if (displayName.Length > maxDisplayNameLength) + { + displayName = displayName.Substring(0, maxDisplayNameLength - 3) + "..."; + } + + m_segments.Clear(); + return displayName; + } + + private readonly String m_jobFactoryDisplayName; + private readonly List m_segments; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/JsonObjectReader.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/JsonObjectReader.cs new file mode 100644 index 00000000000..72ec60de039 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/JsonObjectReader.cs @@ -0,0 +1,234 @@ +using System; +using System.Collections.Generic; +using GitHub.DistributedTask.ObjectTemplating; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + internal sealed class JsonObjectReader : IObjectReader + { + internal JsonObjectReader( + Int32? 
fileId, + String input) + { + m_fileId = fileId; + var token = JToken.Parse(input); + m_enumerator = GetEvents(token, true).GetEnumerator(); + m_enumerator.MoveNext(); + } + + public Boolean AllowLiteral(out LiteralToken literal) + { + var current = m_enumerator.Current; + switch (current.Type) + { + case ParseEventType.Null: + literal = new NullToken(m_fileId, current.Line, current.Column); + m_enumerator.MoveNext(); + return true; + + case ParseEventType.Boolean: + literal = new BooleanToken(m_fileId, current.Line, current.Column, (Boolean)current.Value); + m_enumerator.MoveNext(); + return true; + + case ParseEventType.Number: + literal = new NumberToken(m_fileId, current.Line, current.Column, (Double)current.Value); + m_enumerator.MoveNext(); + return true; + + case ParseEventType.String: + literal = new StringToken(m_fileId, current.Line, current.Column, (String)current.Value); + m_enumerator.MoveNext(); + return true; + } + + literal = null; + return false; + } + + public Boolean AllowSequenceStart(out SequenceToken sequence) + { + var current = m_enumerator.Current; + if (current.Type == ParseEventType.SequenceStart) + { + sequence = new SequenceToken(m_fileId, current.Line, current.Column); + m_enumerator.MoveNext(); + return true; + } + + sequence = null; + return false; + } + + public Boolean AllowSequenceEnd() + { + if (m_enumerator.Current.Type == ParseEventType.SequenceEnd) + { + m_enumerator.MoveNext(); + return true; + } + + return false; + } + + public Boolean AllowMappingStart(out MappingToken mapping) + { + var current = m_enumerator.Current; + if (current.Type == ParseEventType.MappingStart) + { + mapping = new MappingToken(m_fileId, current.Line, current.Column); + m_enumerator.MoveNext(); + return true; + } + + mapping = null; + return false; + } + + public Boolean AllowMappingEnd() + { + if (m_enumerator.Current.Type == ParseEventType.MappingEnd) + { + m_enumerator.MoveNext(); + return true; + } + + return false; + } + + /// + /// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd. + /// + public void ValidateEnd() + { + if (m_enumerator.Current.Type == ParseEventType.DocumentEnd) + { + m_enumerator.MoveNext(); + return; + } + + throw new InvalidOperationException("Expected end of reader"); + } + + /// + /// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart. 
+ /// + public void ValidateStart() + { + if (m_enumerator.Current.Type == ParseEventType.DocumentStart) + { + m_enumerator.MoveNext(); + return; + } + + throw new InvalidOperationException("Expected start of reader"); + } + + private IEnumerable GetEvents( + JToken token, + Boolean root = false) + { + if (root) + { + yield return new ParseEvent(0, 0, ParseEventType.DocumentStart); + } + + var lineInfo = token as Newtonsoft.Json.IJsonLineInfo; + var line = lineInfo.LineNumber; + var column = lineInfo.LinePosition; + + switch (token.Type) + { + case JTokenType.Null: + yield return new ParseEvent(line, column, ParseEventType.Null, null); + break; + + case JTokenType.Boolean: + yield return new ParseEvent(line, column, ParseEventType.Boolean, token.ToObject()); + break; + + case JTokenType.Float: + case JTokenType.Integer: + yield return new ParseEvent(line, column, ParseEventType.Number, token.ToObject()); + break; + + case JTokenType.String: + yield return new ParseEvent(line, column, ParseEventType.String, token.ToObject()); + break; + + case JTokenType.Array: + yield return new ParseEvent(line, column, ParseEventType.SequenceStart); + foreach (var item in (token as JArray)) + { + foreach (var e in GetEvents(item)) + { + yield return e; + } + } + yield return new ParseEvent(line, column, ParseEventType.SequenceEnd); + break; + + case JTokenType.Object: + yield return new ParseEvent(line, column, ParseEventType.MappingStart); + foreach (var pair in (token as JObject)) + { + yield return new ParseEvent(line, column, ParseEventType.String, pair.Key ?? String.Empty); + foreach (var e in GetEvents(pair.Value)) + { + yield return e; + } + } + yield return new ParseEvent(line, column, ParseEventType.MappingEnd); + break; + + default: + throw new NotSupportedException($"Unexpected JTokenType {token.Type}"); + } + + if (root) + { + yield return new ParseEvent(0, 0, ParseEventType.DocumentEnd); + } + } + + private struct ParseEvent + { + public ParseEvent( + Int32 line, + Int32 column, + ParseEventType type, + Object value = null) + { + Line = line; + Column = column; + Type = type; + Value = value; + } + + public readonly Int32 Line; + public readonly Int32 Column; + public readonly ParseEventType Type; + public readonly Object Value; + } + + private enum ParseEventType + { + None = 0, + Null, + Boolean, + Number, + String, + SequenceStart, + SequenceEnd, + MappingStart, + MappingEnd, + DocumentStart, + DocumentEnd, + } + + private IEnumerator m_enumerator; + private Int32? 
m_fileId; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/MatrixBuilder.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/MatrixBuilder.cs new file mode 100644 index 00000000000..a23feaab110 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/MatrixBuilder.cs @@ -0,0 +1,445 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.DistributedTask.ObjectTemplating; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines.ContextData; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + internal sealed class MatrixBuilder + { + internal MatrixBuilder( + TemplateContext context, + String jobFactoryDisplayName) + { + m_context = context; + m_jobFactoryDisplayName = jobFactoryDisplayName; + } + + internal void AddVector( + String name, + SequenceToken vector) + { + m_vectors.Add(name, vector.ToContextData()); + } + + internal DictionaryContextData Vectors => m_vectors; + + internal void Exclude(SequenceToken exclude) + { + m_excludeSequence = exclude; + } + + internal void Include(SequenceToken include) + { + m_includeSequence = include; + } + + internal IEnumerable Build() + { + if (m_vectors.Count > 0) + { + // Parse includes/excludes + var include = new MatrixInclude(m_context, m_vectors, m_includeSequence); + var exclude = new MatrixExclude(m_context, m_vectors, m_excludeSequence); + + // Calculate the cross product size + var productSize = 1; + foreach (var vectorPair in m_vectors) + { + checked + { + var vector = vectorPair.Value.AssertArray("vector"); + productSize *= vector.Count; + } + } + + var nameBuilder = new ReferenceNameBuilder(); + var displayNameBuilder = new JobDisplayNameBuilder(m_jobFactoryDisplayName); + + // Cross product + for (var productIndex = 0; productIndex < productSize; productIndex++) + { + // Matrix + var matrix = new DictionaryContextData(); + var blockSize = productSize; + foreach (var vectorPair in m_vectors) + { + var vectorName = vectorPair.Key; + var vector = vectorPair.Value.AssertArray("vector"); + blockSize = blockSize / vector.Count; + var vectorIndex = (productIndex / blockSize) % vector.Count; + matrix.Add(vectorName, vector[vectorIndex]); + } + + // Exclude + if (exclude.Match(matrix)) + { + continue; + } + + // New configuration + var configuration = new StrategyConfiguration(); + m_context.Memory.AddBytes(TemplateMemory.MinObjectSize); + + // Gather segments for name and display name + foreach (var matrixData in matrix.Traverse(omitKeys: true)) + { + var segment = default(String); + switch (matrixData?.Type) + { + case PipelineContextDataType.Boolean: + case PipelineContextDataType.Number: + case PipelineContextDataType.String: + segment = matrixData.ToString(); + break; + } + + if (!String.IsNullOrEmpty(segment)) + { + // Name segment + nameBuilder.AppendSegment(segment); + + // Display name segment + displayNameBuilder.AppendSegment(segment); + } + } + + // Name + configuration.Name = nameBuilder.Build(); + m_context.Memory.AddBytes(configuration.Name); + + // Display name + configuration.DisplayName = displayNameBuilder.Build(); + m_context.Memory.AddBytes(configuration.DisplayName); + + // Include + if (include.Match(matrix, out var extra)) + { + matrix.Add(extra); + } + + // Matrix context + configuration.ContextData.Add(PipelineTemplateConstants.Matrix, matrix); + m_context.Memory.AddBytes(PipelineTemplateConstants.Matrix); + 
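// Worked example (vector names and values assumed for illustration) of the cross-product
// indexing used in the loop above: with vectors os = [linux, windows] (count 2) and
// node = [8, 10, 12] (count 3), productSize = 6. For productIndex = 4:
//   os:   blockSize = 6 / 2 = 3, vectorIndex = (4 / 3) % 2 = 1 -> windows
//   node: blockSize = 3 / 3 = 1, vectorIndex = (4 / 1) % 3 = 1 -> 10
// so configuration 4 is { os: windows, node: 10 }, and the six configurations enumerate in
// the order linux/8, linux/10, linux/12, windows/8, windows/10, windows/12.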
m_context.Memory.AddBytes(matrix, traverse: true); + + // Add configuration + yield return configuration; + } + } + } + + private sealed class MatrixInclude + { + public MatrixInclude( + TemplateContext context, + DictionaryContextData vectors, + SequenceToken includeSequence) + { + // Convert to excludes sets + if (includeSequence?.Count > 0) + { + foreach (var includeItem in includeSequence) + { + var includeMapping = includeItem.AssertMapping("matrix includes item"); + + // Distinguish filters versus extra + var filter = new MappingToken(null, null, null); + var extra = new DictionaryContextData(); + foreach (var includePair in includeMapping) + { + var includeKeyLiteral = includePair.Key.AssertString("matrix include item key"); + if (vectors.ContainsKey(includeKeyLiteral.Value)) + { + filter.Add(includeKeyLiteral, includePair.Value); + } + else + { + extra.Add(includeKeyLiteral.Value, includePair.Value.ToContextData()); + } + } + + // At least one filter + if (filter.Count == 0) + { + context.Error(includeMapping, $"Matrix include mapping does not contain any filters"); + continue; + } + + // At least one extra + if (extra.Count == 0) + { + context.Error(includeMapping, $"Matrix include mapping does not contain any extra values to include"); + continue; + } + + // Add filter + m_filters.Add(new MatrixIncludeFilter(filter, extra)); + } + } + } + + public Boolean Match( + DictionaryContextData matrix, + out DictionaryContextData extra) + { + extra = default(DictionaryContextData); + foreach (var filter in m_filters) + { + if (filter.Match(matrix, out var items)) + { + if (extra == null) + { + extra = new DictionaryContextData(); + } + + foreach (var pair in items) + { + extra[pair.Key] = pair.Value; + } + } + } + + return extra != null; + } + + private readonly List m_filters = new List(); + } + + private sealed class MatrixIncludeFilter : MatrixFilter + { + public MatrixIncludeFilter( + MappingToken filter, + DictionaryContextData extra) + : base(filter) + { + m_extra = extra; + } + + public Boolean Match( + DictionaryContextData matrix, + out DictionaryContextData extra) + { + if (base.Match(matrix)) + { + extra = m_extra; + return true; + } + + extra = null; + return false; + } + + private readonly DictionaryContextData m_extra; + } + + private sealed class MatrixExclude + { + public MatrixExclude( + TemplateContext context, + DictionaryContextData vectors, + SequenceToken excludeSequence) + { + // Convert to excludes sets + if (excludeSequence?.Count > 0) + { + foreach (var excludeItem in excludeSequence) + { + var excludeMapping = excludeItem.AssertMapping("matrix excludes item"); + + // Check empty + if (excludeMapping.Count == 0) + { + context.Error(excludeMapping, $"Matrix exclude filter must not be empty"); + continue; + } + + // Validate first-level keys + foreach (var excludePair in excludeMapping) + { + var excludeKey = excludePair.Key.AssertString("matrix excludes item key"); + if (!vectors.ContainsKey(excludeKey.Value)) + { + context.Error(excludeKey, $"Matrix exclude key '{excludeKey.Value}' does not match any key within the matrix"); + continue; + } + } + + // Add filter + m_filters.Add(new MatrixExcludeFilter(excludeMapping)); + } + } + } + + public Boolean Match(DictionaryContextData matrix) + { + foreach (var filter in m_filters) + { + if (filter.Match(matrix)) + { + return true; + } + } + + return false; + } + + private readonly List m_filters = new List(); + } + + private sealed class MatrixExcludeFilter : MatrixFilter + { + public MatrixExcludeFilter(MappingToken 
filter) + : base(filter) + { + } + + public new Boolean Match(DictionaryContextData matrix) + { + return base.Match(matrix); + } + } + + private abstract class MatrixFilter + { + protected MatrixFilter(MappingToken matrixFilter) + { + var state = new MappingState(null, matrixFilter); + while (state != null) + { + if (state.MoveNext()) + { + var value = state.Mapping[state.Index].Value; + if (value is LiteralToken literal) + { + AddExpression(state, literal); + } + else + { + var mapping = state.Mapping[state.Index].Value.AssertMapping("matrix filter"); + state = new MappingState(state, mapping); + } + } + else + { + state = state.Parent; + } + } + } + + protected Boolean Match(DictionaryContextData matrix) + { + if (matrix.Count == 0) + { + throw new InvalidOperationException("Matrix filter cannot be empty"); + } + + foreach (var expression in m_expressions) + { + var result = expression.Evaluate(null, null, matrix, null); + if (result.IsFalsy) + { + return false; + } + } + + return true; + } + + private void AddExpression( + MappingState state, + LiteralToken literal) + { + var expressionLiteral = default(String); + switch (literal.Type) + { + case TokenType.Null: + expressionLiteral = ExpressionConstants.Null; + break; + + case TokenType.Boolean: + var booleanToken = literal as BooleanToken; + expressionLiteral = booleanToken.Value ? ExpressionConstants.True : ExpressionConstants.False; + break; + + case TokenType.Number: + var numberToken = literal as NumberToken; + expressionLiteral = String.Format(CultureInfo.InvariantCulture, ExpressionConstants.NumberFormat, numberToken.Value); + break; + + case TokenType.String: + var stringToken = literal as StringToken; + expressionLiteral = $"'{ExpressionUtility.StringEscape(stringToken.Value)}'"; + break; + + default: + throw new NotSupportedException($"Unexpected literal type '{literal.Type}'"); + } + + var str = $"{state.Path} == {expressionLiteral}"; + var parser = new ExpressionParser(); + var expression = parser.CreateTree(str, null, s_matrixFilterNamedValues, null); + m_expressions.Add(expression); + } + + private static readonly INamedValueInfo[] s_matrixFilterNamedValues = new INamedValueInfo[] + { + new NamedValueInfo(PipelineTemplateConstants.Matrix), + }; + private readonly List m_expressions = new List(); + } + + private sealed class MappingState + { + public MappingState( + MappingState parent, + MappingToken mapping) + { + Parent = parent; + Mapping = mapping; + Index = -1; + } + + public Boolean MoveNext() + { + if (++Index < Mapping.Count) + { + var keyLiteral = Mapping[Index].Key.AssertString("matrix filter key"); + var parentPath = Parent?.Path ?? 
PipelineTemplateConstants.Matrix; + Path = $"{parentPath}['{ExpressionUtility.StringEscape(keyLiteral.Value)}']"; + return true; + } + else + { + return false; + } + } + + public MappingState Parent; + public MappingToken Mapping; + public Int32 Index; + public String Path; + } + + private sealed class MatrixNamedValue : NamedValue + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + return context.State; + } + } + + private readonly TemplateContext m_context; + private readonly String m_jobFactoryDisplayName; + private readonly DictionaryContextData m_vectors = new DictionaryContextData(); + private SequenceToken m_excludeSequence; + private SequenceToken m_includeSequence; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ParseOptions.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ParseOptions.cs new file mode 100644 index 00000000000..4ea8bf2b619 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ParseOptions.cs @@ -0,0 +1,45 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Reflection; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ParseOptions + { + public ParseOptions() + { + } + + internal ParseOptions(ParseOptions copy) + { + MaxFiles = copy.MaxFiles; + MaxFileSize = copy.MaxFileSize; + MaxResultSize = copy.MaxResultSize; + } + + public Int32 MaxDepth => 50; + + /// + /// Gets the maximum error message length before the message will be truncated. + /// + public Int32 MaxErrorMessageLength => 500; + + /// + /// Gets the maximum number of errors that can be recorded when parsing a pipeline. + /// + public Int32 MaxErrors => 10; + + /// + /// Gets or sets the maximum number of files that can be loaded when parsing a pipeline. Zero or less is treated as infinite. 
+ /// + public Int32 MaxFiles { get; set; } = 50; + + public Int32 MaxFileSize { get; set; } = 1024 * 1024; // 1 mb + + public Int32 MaxParseEvents => 1000000; // 1 million + + public Int32 MaxResultSize { get; set; } = 10 * 1024 * 1024; // 10 mb + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ParseResult.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ParseResult.cs new file mode 100644 index 00000000000..b23b9c07a3a --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ParseResult.cs @@ -0,0 +1,30 @@ +using System; +using System.IO; +using GitHub.DistributedTask.ObjectTemplating; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + internal sealed class ParseResult + { + public TemplateContext Context { get; set; } + + public TemplateToken Value { get; set; } + + public String ToYaml() + { + if (Value == null) + { + return null; + } + + // Serialize + using (var stringWriter = new StringWriter()) + { + TemplateWriter.Write(new YamlObjectWriter(stringWriter), Value); + stringWriter.Flush(); + return stringWriter.ToString(); + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConstants.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConstants.cs new file mode 100644 index 00000000000..0675b993c9a --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConstants.cs @@ -0,0 +1,82 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class PipelineTemplateConstants + { + public const String Always = "always"; + public const String BooleanStepsContext = "boolean-steps-context"; + public const String CancelTimeoutMinutes = "cancel-timeout-minutes"; + public const String Cancelled = "cancelled"; + public const String Checkout = "checkout"; + public const String Clean = "clean"; + public const String Container = "container"; + public const String ContinueOnError = "continue-on-error"; + public const String Env = "env"; + public const String Event = "event"; + public const String EventPattern = "github.event"; + public const String Exclude = "exclude"; + public const String FailFast = "fail-fast"; + public const String Failure = "failure"; + public const String FetchDepth = "fetch-depth"; + public const String GeneratedId = "generated-id"; + public const String GitHub = "github"; + public const String Id = "id"; + public const String If = "if"; + public const String Image = "image"; + public const String Include = "include"; + public const String Inputs = "inputs"; + public const String Job = "job"; + public const String Jobs = "jobs"; + public const String Lfs = "lfs"; + public const String Matrix = "matrix"; + public const String MaxParallel = "max-parallel"; + public const String Name = "name"; + public const String Needs = "needs"; + public const String NumberStepsContext = "number-steps-context"; + public const String NumberStrategyContext = "number-strategy-context"; + public const String On = "on"; + public const String Options = "options"; + public const String Outputs = "outputs"; + public const String OutputsPattern = "needs.*.outputs"; + public const String Path = "path"; + public const String Pool = "pool"; + public const String Ports = "ports"; + public const String Result = "result"; + public const String RunDisplayPrefix = "Run "; + public const String Run = "run"; + 
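// For orientation (hypothetical example; the job id and values are made up): a minimal
// workflow file touches several of the keys that the pipeline template converter compares
// against the constants defined in this class:
//   on: push                    -> PipelineTemplateConstants.On
//   jobs:                       -> PipelineTemplateConstants.Jobs
//     build:
//       runs-on: ubuntu-latest  -> PipelineTemplateConstants.RunsOn
//       steps:                  -> PipelineTemplateConstants.Steps
//         - run: echo hello     -> PipelineTemplateConstants.Run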
public const String Runner = "runner"; + public const String RunsOn = "runs-on"; + public const String Scope = "scope"; + public const String Scopes = "scopes"; + public const String Secrets = "secrets"; + public const String Services = "services"; + public const String Shell = "shell"; + public const String Skipped = "skipped"; + public const String StepEnv = "step-env"; + public const String Steps = "steps"; + public const String StepsScopeInputs = "steps-scope-inputs"; + public const String StepsScopeOutputs = "steps-scope-outputs"; + public const String StepsTemplateRoot = "steps-template-root"; + public const String StepWith = "step-with"; + public const String Strategy = "strategy"; + public const String StringStepsContext = "string-steps-context"; + public const String StringStrategyContext = "string-strategy-context"; + public const String Submodules = "submodules"; + public const String Success = "success"; + public const String Template = "template"; + public const String TimeoutMinutes = "timeout-minutes"; + public const String Token = "token"; + public const String Uses = "uses"; + public const String VmImage = "vmImage"; + public const String Volumes = "volumes"; + public const String With = "with"; + public const String Workflow = "workflow"; + public const String Workflow_1_0 = "workflow-v1.0"; + public const String WorkflowRoot = "workflow-root"; + public const String WorkingDirectory = "working-directory"; + public const String Workspace = "workspace"; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConverter.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConverter.cs new file mode 100644 index 00000000000..45f6dc697ae --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateConverter.cs @@ -0,0 +1,1147 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.DistributedTask.Expressions2.Sdk.Functions; +using GitHub.DistributedTask.ObjectTemplating; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.Pipelines.Validation; +using GitHub.Services.Common; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + internal static class PipelineTemplateConverter + { + internal static PipelineTemplate ConvertToPipeline( + TemplateContext context, + RepositoryResource self, + TemplateToken pipeline) + { + var result = new PipelineTemplate(); + result.Resources.Repositories.Add(self); + var defaultStage = new Stage + { + Name = PipelineConstants.DefaultJobName, + }; + result.Stages.Add(defaultStage); + + try + { + if (pipeline == null || context.Errors.Count > 0) + { + return result; + } + + var pipelineMapping = pipeline.AssertMapping("root"); + + foreach (var pipelinePair in pipelineMapping) + { + var pipelineKey = pipelinePair.Key.AssertString("root key"); + + switch (pipelineKey.Value) + { + case PipelineTemplateConstants.On: + break; + + case PipelineTemplateConstants.Name: + break; + + case PipelineTemplateConstants.Env: + result.EnvironmentVariables = pipelinePair.Value; + break; + + case PipelineTemplateConstants.Jobs: + defaultStage.Phases.AddRange(ConvertToJobFactories(context, result.Resources, pipelinePair.Value)); + break; + + default: + pipelineKey.AssertUnexpectedValue("root key"); // throws + break; + } + } + } + 
catch (Exception ex) + { + context.Errors.Add(ex); + } + finally + { + if (context.Errors.Count > 0) + { + foreach (var error in context.Errors) + { + result.Errors.Add(new PipelineValidationError(error.Code, error.Message)); + } + } + } + + return result; + } + + internal static String ConvertToJobDisplayName( + TemplateContext context, + TemplateToken displayName, + Boolean allowExpressions = false) + { + var result = default(String); + + // Expression + if (allowExpressions && displayName is ExpressionToken) + { + return result; + } + + // String + var displayNameString = displayName.AssertString($"job {PipelineTemplateConstants.Name}"); + result = displayNameString.Value; + return result; + } + + internal static PhaseTarget ConvertToJobTarget( + TemplateContext context, + TemplateToken runsOn, + Boolean allowExpressions = false) + { + var result = new AgentPoolTarget(); + + // Expression + if (allowExpressions && runsOn is ExpressionToken) + { + return result; + } + + // String + if (runsOn is StringToken runsOnString) + { + result.Pool = new AgentPoolReference { Name = "GitHub Actions" }; + result.AgentSpecification = new JObject + { + { PipelineTemplateConstants.VmImage, runsOnString.Value } + }; + } + // Mapping + else + { + var runsOnMapping = runsOn.AssertMapping($"job {PipelineTemplateConstants.RunsOn}"); + foreach (var runsOnProperty in runsOnMapping) + { + // Expression + if (allowExpressions && runsOnProperty.Key is ExpressionToken) + { + continue; + } + + // String + var propertyName = runsOnProperty.Key.AssertString($"job {PipelineTemplateConstants.RunsOn} key"); + + switch (propertyName.Value) + { + case PipelineTemplateConstants.Pool: + // Expression + if (allowExpressions && runsOnProperty.Value is ExpressionToken) + { + continue; + } + + // Literal + var pool = runsOnProperty.Value.AssertString($"job {PipelineTemplateConstants.RunsOn} key"); + result.Pool = new AgentPoolReference { Name = pool.Value }; + break; + + default: + propertyName.AssertUnexpectedValue($"job {PipelineTemplateConstants.RunsOn} key"); // throws + break; + } + } + } + + return result; + } + + internal static Int32? ConvertToJobTimeout( + TemplateContext context, + TemplateToken token, + Boolean allowExpressions = false) + { + if (allowExpressions && token is ExpressionToken) + { + return null; + } + + var numberToken = token.AssertNumber($"job {PipelineTemplateConstants.TimeoutMinutes}"); + return (Int32)numberToken.Value; + } + + internal static Int32? ConvertToJobCancelTimeout( + TemplateContext context, + TemplateToken token, + Boolean allowExpressions = false) + { + if (allowExpressions && token is ExpressionToken) + { + return null; + } + + var numberToken = token.AssertNumber($"job {PipelineTemplateConstants.CancelTimeoutMinutes}"); + return (Int32)numberToken.Value; + } + + internal static Boolean? 
ConvertToStepContinueOnError(
+            TemplateContext context,
+            TemplateToken token,
+            Boolean allowExpressions = false)
+        {
+            if (allowExpressions && token is ExpressionToken)
+            {
+                return null;
+            }
+
+            var booleanToken = token.AssertBoolean($"step {PipelineTemplateConstants.ContinueOnError}");
+            return booleanToken.Value;
+        }
+
+        internal static String ConvertToStepDisplayName(
+            TemplateContext context,
+            TemplateToken token,
+            Boolean allowExpressions = false)
+        {
+            if (allowExpressions && token is ExpressionToken)
+            {
+                return null;
+            }
+
+            var stringToken = token.AssertString($"step {PipelineTemplateConstants.Name}");
+            return stringToken.Value;
+        }
+
+        internal static Dictionary<String, String> ConvertToStepEnvironment(
+            TemplateContext context,
+            TemplateToken environment,
+            StringComparer keyComparer,
+            Boolean allowExpressions = false)
+        {
+            var result = new Dictionary<String, String>(keyComparer);
+
+            // Expression
+            if (allowExpressions && environment is ExpressionToken)
+            {
+                return result;
+            }
+
+            // Mapping
+            var mapping = environment.AssertMapping("environment");
+
+            foreach (var pair in mapping)
+            {
+                // Expression key
+                if (allowExpressions && pair.Key is ExpressionToken)
+                {
+                    continue;
+                }
+
+                // String key
+                var key = pair.Key.AssertString("environment key");
+
+                // Expression value
+                if (allowExpressions && pair.Value is ExpressionToken)
+                {
+                    continue;
+                }
+
+                // String value
+                var value = pair.Value.AssertString("environment value");
+                result[key.Value] = value.Value;
+            }
+
+            return result;
+        }
+
+        internal static Dictionary<String, String> ConvertToStepInputs(
+            TemplateContext context,
+            TemplateToken inputs,
+            Boolean allowExpressions = false)
+        {
+            var result = new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
+
+            // Expression
+            if (allowExpressions && inputs is ExpressionToken)
+            {
+                return result;
+            }
+
+            // Mapping
+            var mapping = inputs.AssertMapping("inputs");
+
+            foreach (var pair in mapping)
+            {
+                // Expression key
+                if (allowExpressions && pair.Key is ExpressionToken)
+                {
+                    continue;
+                }
+
+                // Literal key
+                var key = pair.Key.AssertString("inputs key");
+
+                // Expression value
+                if (allowExpressions && pair.Value is ExpressionToken)
+                {
+                    continue;
+                }
+
+                // Literal value
+                var value = pair.Value.AssertString("inputs value");
+                result[key.Value] = value.Value;
+            }
+
+            return result;
+        }
+
+        internal static Int32?
ConvertToStepTimeout( + TemplateContext context, + TemplateToken token, + Boolean allowExpressions = false) + { + if (allowExpressions && token is ExpressionToken) + { + return null; + } + + var numberToken = token.AssertNumber($"step {PipelineTemplateConstants.TimeoutMinutes}"); + return (Int32)numberToken.Value; + } + + internal static StrategyResult ConvertToStrategy( + TemplateContext context, + TemplateToken token, + String jobFactoryDisplayName, + Boolean allowExpressions = false) + { + var result = new StrategyResult(); + + // Expression + if (allowExpressions && token is ExpressionToken) + { + return result; + } + + var strategyMapping = token.AssertMapping(PipelineTemplateConstants.Strategy); + var matrixBuilder = default(MatrixBuilder); + var hasExpressions = false; + + foreach (var strategyPair in strategyMapping) + { + // Expression key + if (allowExpressions && strategyPair.Key is ExpressionToken) + { + hasExpressions = true; + continue; + } + + // Literal key + var strategyKey = strategyPair.Key.AssertString("strategy key"); + + switch (strategyKey.Value) + { + // Fail-Fast + case PipelineTemplateConstants.FailFast: + if (allowExpressions && strategyPair.Value is ExpressionToken) + { + hasExpressions = true; + continue; + } + + var failFastBooleanToken = strategyPair.Value.AssertBoolean($"strategy {PipelineTemplateConstants.FailFast}"); + result.FailFast = failFastBooleanToken.Value; + break; + + // Max-Parallel + case PipelineTemplateConstants.MaxParallel: + if (allowExpressions && strategyPair.Value is ExpressionToken) + { + hasExpressions = true; + continue; + } + + var maxParallelNumberToken = strategyPair.Value.AssertNumber($"strategy {PipelineTemplateConstants.MaxParallel}"); + result.MaxParallel = (Int32)maxParallelNumberToken.Value; + break; + + // Matrix + case PipelineTemplateConstants.Matrix: + + // Expression + if (allowExpressions && strategyPair.Value is ExpressionToken) + { + hasExpressions = true; + continue; + } + + var matrix = strategyPair.Value.AssertMapping("matrix"); + hasExpressions = hasExpressions || matrix.Traverse().Any(x => x is ExpressionToken); + matrixBuilder = new MatrixBuilder(context, jobFactoryDisplayName); + var hasVector = false; + + foreach (var matrixPair in matrix) + { + // Expression key + if (allowExpressions && matrixPair.Key is ExpressionToken) + { + hasVector = true; // For validation, treat as if a vector is defined + continue; + } + + var matrixKey = matrixPair.Key.AssertString("matrix key"); + switch (matrixKey.Value) + { + case PipelineTemplateConstants.Include: + if (allowExpressions && matrixPair.Value is ExpressionToken) + { + continue; + } + + var includeSequence = matrixPair.Value.AssertSequence("matrix includes"); + matrixBuilder.Include(includeSequence); + break; + + case PipelineTemplateConstants.Exclude: + if (allowExpressions && matrixPair.Value is ExpressionToken) + { + continue; + } + + var excludeSequence = matrixPair.Value.AssertSequence("matrix excludes"); + matrixBuilder.Exclude(excludeSequence); + break; + + default: + hasVector = true; + + if (allowExpressions && matrixPair.Value is ExpressionToken) + { + continue; + } + + var vectorName = matrixKey.Value; + var vectorSequence = matrixPair.Value.AssertSequence("matrix vector value"); + if (vectorSequence.Count == 0) + { + context.Error(vectorSequence, $"Matrix vector '{vectorName}' does not contain any values"); + } + else + { + matrixBuilder.AddVector(vectorName, vectorSequence); + } + break; + } + } + + if (!hasVector) + { + context.Error(matrix, $"Matrix 
must define at least one vector");
+                        }
+
+                        break;
+
+                    default:
+                        strategyKey.AssertUnexpectedValue("strategy key"); // throws
+                        break;
+                }
+            }
+
+            if (hasExpressions)
+            {
+                return result;
+            }
+
+            if (matrixBuilder != null)
+            {
+                result.Configurations.AddRange(matrixBuilder.Build());
+            }
+
+            for (var i = 0; i < result.Configurations.Count; i++)
+            {
+                var configuration = result.Configurations[i];
+
+                var strategy = new DictionaryContextData()
+                {
+                    {
+                        "fail-fast",
+                        new BooleanContextData(result.FailFast)
+                    },
+                    {
+                        "job-index",
+                        new NumberContextData(i)
+                    },
+                    {
+                        "job-total",
+                        new NumberContextData(result.Configurations.Count)
+                    }
+                };
+
+                if (result.MaxParallel > 0)
+                {
+                    strategy.Add(
+                        "max-parallel",
+                        new NumberContextData(result.MaxParallel)
+                    );
+                }
+                else
+                {
+                    strategy.Add(
+                        "max-parallel",
+                        new NumberContextData(result.Configurations.Count)
+                    );
+                }
+
+                configuration.ContextData.Add(PipelineTemplateConstants.Strategy, strategy);
+                context.Memory.AddBytes(PipelineTemplateConstants.Strategy);
+                context.Memory.AddBytes(strategy, traverse: true);
+
+                if (!configuration.ContextData.ContainsKey(PipelineTemplateConstants.Matrix))
+                {
+                    configuration.ContextData.Add(PipelineTemplateConstants.Matrix, null);
+                    context.Memory.AddBytes(PipelineTemplateConstants.Matrix);
+                }
+            }
+
+            return result;
+        }
+
+        internal static JobContainer ConvertToJobContainer(
+            TemplateContext context,
+            TemplateToken value,
+            bool allowExpressions = false)
+        {
+            var result = new JobContainer();
+            if (allowExpressions && value.Traverse().Any(x => x is ExpressionToken))
+            {
+                return result;
+            }
+
+            if (value is StringToken containerLiteral)
+            {
+                result.Image = containerLiteral.Value;
+            }
+            else
+            {
+                var containerMapping = value.AssertMapping($"{PipelineTemplateConstants.Container}");
+                foreach (var containerPropertyPair in containerMapping)
+                {
+                    var propertyName = containerPropertyPair.Key.AssertString($"{PipelineTemplateConstants.Container} key");
+
+                    switch (propertyName.Value)
+                    {
+                        case PipelineTemplateConstants.Image:
+                            result.Image = containerPropertyPair.Value.AssertString($"{PipelineTemplateConstants.Container} {propertyName}").Value;
+                            break;
+                        case PipelineTemplateConstants.Env:
+                            var env = containerPropertyPair.Value.AssertMapping($"{PipelineTemplateConstants.Container} {propertyName}");
+                            var envDict = new Dictionary<String, String>(env.Count);
+                            foreach (var envPair in env)
+                            {
+                                var envKey = envPair.Key.ToString();
+                                var envValue = envPair.Value.AssertString($"{PipelineTemplateConstants.Container} {propertyName} {envPair.Key.ToString()}").Value;
+                                envDict.Add(envKey, envValue);
+                            }
+                            result.Environment = envDict;
+                            break;
+                        case PipelineTemplateConstants.Options:
+                            result.Options = containerPropertyPair.Value.AssertString($"{PipelineTemplateConstants.Container} {propertyName}").Value;
+                            break;
+                        case PipelineTemplateConstants.Ports:
+                            var ports = containerPropertyPair.Value.AssertSequence($"{PipelineTemplateConstants.Container} {propertyName}");
+                            var portList = new List<String>(ports.Count);
+                            foreach (var portItem in ports)
+                            {
+                                var portString = portItem.AssertString($"{PipelineTemplateConstants.Container} {propertyName} {portItem.ToString()}").Value;
+                                portList.Add(portString);
+                            }
+                            result.Ports = portList;
+                            break;
+                        case PipelineTemplateConstants.Volumes:
+                            var volumes = containerPropertyPair.Value.AssertSequence($"{PipelineTemplateConstants.Container} {propertyName}");
+                            var volumeList = new List<String>(volumes.Count);
+                            foreach (var volumeItem in volumes)
+                            {
+                                var volumeString =
volumeItem.AssertString($"{PipelineTemplateConstants.Container} {propertyName} {volumeItem.ToString()}").Value; + volumeList.Add(volumeString); + } + result.Volumes = volumeList; + break; + default: + propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Container} key"); + break; + } + } + } + + if (result.Image.StartsWith("docker://", StringComparison.Ordinal)) + { + result.Image = result.Image.Substring("docker://".Length); + } + + if (String.IsNullOrEmpty(result.Image)) + { + context.Error(value, "Container image cannot be empty"); + } + + return result; + } + + internal static List> ConvertToJobServiceContainers( + TemplateContext context, + TemplateToken services, + bool allowExpressions = false) + { + var result = new List>(); + + if (allowExpressions && services.Traverse().Any(x => x is ExpressionToken)) + { + return result; + } + + var servicesMapping = services.AssertMapping("services"); + + foreach (var servicePair in servicesMapping) + { + var networkAlias = servicePair.Key.AssertString("services key").Value; + var container = ConvertToJobContainer(context, servicePair.Value); + result.Add(new KeyValuePair(networkAlias, container)); + } + + return result; + } + + private static IEnumerable ConvertToJobFactories( + TemplateContext context, + PipelineResources resources, + TemplateToken workflow) + { + var jobsMapping = workflow.AssertMapping(PipelineTemplateConstants.Jobs); + + foreach (var jobsPair in jobsMapping) + { + var jobNameToken = jobsPair.Key.AssertString($"{PipelineTemplateConstants.Jobs} key"); + if (!NameValidation.IsValid(jobNameToken.Value, true)) + { + context.Error(jobNameToken, $"Job name {jobNameToken.Value} is invalid. Names must start with a letter or '_' and contain only alphanumeric characters, '-', or '_'"); + } + var result = new JobFactory + { + Name = jobNameToken.Value + }; + + var jobFactoryDefinition = jobsPair.Value.AssertMapping($"{PipelineTemplateConstants.Jobs} value"); + + foreach (var jobFactoryProperty in jobFactoryDefinition) + { + var propertyName = jobFactoryProperty.Key.AssertString($"job property name"); + + switch (propertyName.Value) + { + case PipelineTemplateConstants.ContinueOnError: + var continueOnErrorBooleanToken = jobFactoryProperty.Value.AssertBoolean($"job {PipelineTemplateConstants.ContinueOnError}"); + result.ContinueOnError = continueOnErrorBooleanToken.Value; + break; + + case PipelineTemplateConstants.If: + var ifCondition = jobFactoryProperty.Value.AssertString($"job {PipelineTemplateConstants.If}"); + result.Condition = ConvertToIfCondition(context, ifCondition, true, true); + break; + + case PipelineTemplateConstants.Name: + var displayName = jobFactoryProperty.Value.AssertScalar($"job {PipelineTemplateConstants.Name}"); + ConvertToJobDisplayName(context, displayName, allowExpressions: true); // Validate early if possible + if (displayName is StringToken) + { + result.DisplayName = displayName.ToString(); + } + else + { + result.JobDisplayName = displayName.Clone(true) as ExpressionToken; + } + break; + + case PipelineTemplateConstants.Needs: + if (jobFactoryProperty.Value is StringToken needsLiteral) + { + result.DependsOn.Add(needsLiteral.Value); + } + else + { + var needs = jobFactoryProperty.Value.AssertSequence($"job {PipelineTemplateConstants.Needs}"); + foreach (var needsItem in needs) + { + var need = needsItem.AssertString($"job {PipelineTemplateConstants.Needs} item"); + result.DependsOn.Add(need.Value); + } + } + break; + + case PipelineTemplateConstants.RunsOn: + ConvertToJobTarget(context, 
jobFactoryProperty.Value, allowExpressions: true); // Validate early if possible + result.JobTarget = jobFactoryProperty.Value.Clone(true); + break; + + case PipelineTemplateConstants.Scopes: + foreach (var scope in ConvertToScopes(context, jobFactoryProperty.Value)) + { + result.Scopes.Add(scope); + } + break; + + case PipelineTemplateConstants.Steps: + result.Steps.AddRange(ConvertToSteps(context, jobFactoryProperty.Value)); + break; + + case PipelineTemplateConstants.Strategy: + ConvertToStrategy(context, jobFactoryProperty.Value, null, allowExpressions: true); // Validate early if possible + result.Strategy = jobFactoryProperty.Value.Clone(true); + break; + + case PipelineTemplateConstants.TimeoutMinutes: + ConvertToJobTimeout(context, jobFactoryProperty.Value, allowExpressions: true); // Validate early if possible + result.JobTimeout = jobFactoryProperty.Value.Clone(true) as ScalarToken; + break; + + case PipelineTemplateConstants.CancelTimeoutMinutes: + ConvertToJobCancelTimeout(context, jobFactoryProperty.Value, allowExpressions: true); // Validate early if possible + result.JobCancelTimeout = jobFactoryProperty.Value.Clone(true) as ScalarToken; + break; + + case PipelineTemplateConstants.Container: + ConvertToJobContainer(context, jobFactoryProperty.Value, allowExpressions: true); + result.JobContainer = jobFactoryProperty.Value.Clone(true); + break; + + case PipelineTemplateConstants.Services: + ConvertToJobServiceContainers(context, jobFactoryProperty.Value, allowExpressions: true); + result.JobServiceContainers = jobFactoryProperty.Value.Clone(true); + break; + + case PipelineTemplateConstants.Env: + result.EnvironmentVariables = jobFactoryProperty.Value.Clone(true); + break; + + default: + propertyName.AssertUnexpectedValue("job key"); // throws + break; + } + } + + // todo: Move "required" support into schema validation + if (result.JobTarget == null) + { + context.Error(jobFactoryDefinition, $"The '{PipelineTemplateConstants.RunsOn}' property is required"); + } + + if (String.IsNullOrEmpty(result.DisplayName)) + { + result.DisplayName = result.Name; + } + + if (result.Scopes.Count > 0) + { + result.Steps.Insert( + 0, + new ActionStep + { + Reference = new ScriptReference(), + DisplayName = "WARNING: TEMPLATES ARE HIGHLY EXPERIMENTAL", + Inputs = new MappingToken(null, null, null) + { + { + new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script), + new StringToken(null, null, null, "echo WARNING: TEMPLATES ARE HIGHLY EXPERIMENTAL") + } + } + }); + result.Steps.Add( + new ActionStep + { + Reference = new ScriptReference(), + DisplayName = "WARNING: TEMPLATES ARE HIGHLY EXPERIMENTAL", + Inputs = new MappingToken(null, null, null) + { + { + new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script), + new StringToken(null, null, null, "echo WARNING: TEMPLATES ARE HIGHLY EXPERIMENTAL") + } + } + }); + } + + yield return result; + } + } + + private static IEnumerable ConvertToScopes( + TemplateContext context, + TemplateToken scopes) + { + var scopesSequence = scopes.AssertSequence($"job {PipelineTemplateConstants.Scopes}"); + + foreach (var scopesItem in scopesSequence) + { + var result = new ContextScope(); + var scope = scopesItem.AssertMapping($"{PipelineTemplateConstants.Scopes} item"); + + foreach (var scopeProperty in scope) + { + var propertyName = scopeProperty.Key.AssertString($"{PipelineTemplateConstants.Scopes} item key"); + + switch (propertyName.Value) + { + case PipelineTemplateConstants.Name: + var nameLiteral = 
scopeProperty.Value.AssertString($"{PipelineTemplateConstants.Scopes} item {PipelineTemplateConstants.Name}"); + result.Name = nameLiteral.Value; + break; + + case PipelineTemplateConstants.Inputs: + result.Inputs = scopeProperty.Value.AssertMapping($"{PipelineTemplateConstants.Scopes} item {PipelineTemplateConstants.Inputs}"); + break; + + case PipelineTemplateConstants.Outputs: + result.Outputs = scopeProperty.Value.AssertMapping($"{PipelineTemplateConstants.Scopes} item {PipelineTemplateConstants.Outputs}"); + break; + } + } + + yield return result; + } + } + + private static List ConvertToSteps( + TemplateContext context, + TemplateToken steps) + { + var stepsSequence = steps.AssertSequence($"job {PipelineTemplateConstants.Steps}"); + + var result = new List(); + foreach (var stepsItem in stepsSequence) + { + var step = ConvertToStep(context, stepsItem); + if (step != null) // step = null means we are hitting error during step conversion, there should be an error in context.errors + { + if (step.Enabled) + { + result.Add(step); + } + } + } + + return result; + } + + private static ActionStep ConvertToStep( + TemplateContext context, + TemplateToken stepsItem) + { + var step = stepsItem.AssertMapping($"{PipelineTemplateConstants.Steps} item"); + var continueOnError = default(ScalarToken); + var env = default(TemplateToken); + var id = default(StringToken); + var ifCondition = default(String); + var ifToken = default(StringToken); + var name = default(ScalarToken); + var run = default(ScalarToken); + var scope = default(StringToken); + var timeoutMinutes = default(ScalarToken); + var uses = default(StringToken); + var with = default(TemplateToken); + var workingDir = default(ScalarToken); + var path = default(ScalarToken); + var clean = default(ScalarToken); + var fetchDepth = default(ScalarToken); + var lfs = default(ScalarToken); + var submodules = default(ScalarToken); + var shell = default(ScalarToken); + + foreach (var stepProperty in step) + { + var propertyName = stepProperty.Key.AssertString($"{PipelineTemplateConstants.Steps} item key"); + + switch (propertyName.Value) + { + case PipelineTemplateConstants.Clean: + clean = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Clean}"); + break; + + case PipelineTemplateConstants.ContinueOnError: + ConvertToStepContinueOnError(context, stepProperty.Value, allowExpressions: true); // Validate early if possible + continueOnError = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} {PipelineTemplateConstants.ContinueOnError}"); + break; + + case PipelineTemplateConstants.Env: + ConvertToStepEnvironment(context, stepProperty.Value, StringComparer.Ordinal, allowExpressions: true); // Validate early if possible + env = stepProperty.Value; + break; + + case PipelineTemplateConstants.FetchDepth: + fetchDepth = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.FetchDepth}"); + break; + + case PipelineTemplateConstants.Id: + id = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Id}"); + if (!NameValidation.IsValid(id.Value, true)) + { + context.Error(id, $"Step id {id.Value} is invalid. 
Ids must start with a letter or '_' and contain only alphanumeric characters, '-', or '_'"); + } + break; + + case PipelineTemplateConstants.If: + ifToken = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.If}"); + break; + + case PipelineTemplateConstants.Lfs: + lfs = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Lfs}"); + break; + + case PipelineTemplateConstants.Name: + name = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Name}"); + break; + + case PipelineTemplateConstants.Path: + path = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Path}"); + break; + + case PipelineTemplateConstants.Run: + run = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Run}"); + break; + + case PipelineTemplateConstants.Shell: + shell = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Shell}"); + break; + + case PipelineTemplateConstants.Scope: + scope = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Scope}"); + break; + + case PipelineTemplateConstants.Submodules: + submodules = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Submodules}"); + break; + + case PipelineTemplateConstants.TimeoutMinutes: + ConvertToStepTimeout(context, stepProperty.Value, allowExpressions: true); // Validate early if possible + timeoutMinutes = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.TimeoutMinutes}"); + break; + + case PipelineTemplateConstants.Uses: + uses = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}"); + break; + + case PipelineTemplateConstants.With: + ConvertToStepInputs(context, stepProperty.Value, allowExpressions: true); // Validate early if possible + with = stepProperty.Value; + break; + + case PipelineTemplateConstants.WorkingDirectory: + workingDir = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.WorkingDirectory}"); + break; + + default: + propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Steps} item key"); // throws + break; + } + } + + // Fixup the if-condition + var isDefaultScope = String.IsNullOrEmpty(scope?.Value); + ifCondition = ConvertToIfCondition(context, ifToken, false, isDefaultScope); + + if (run != null) + { + var result = new ActionStep + { + ScopeName = scope?.Value, + ContextName = id?.Value, + ContinueOnError = continueOnError?.Clone(true) as ScalarToken, + DisplayNameToken = name?.Clone(true) as ScalarToken, + Condition = ifCondition, + TimeoutInMinutes = timeoutMinutes?.Clone(true) as ScalarToken, + Environment = env?.Clone(true), + Reference = new ScriptReference(), + }; + + var inputs = new MappingToken(null, null, null); + inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script), run.Clone(true)); + + if (workingDir != null) + { + inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.WorkingDirectory), workingDir.Clone(true)); + } + + if (shell != null) + { + inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Shell), shell.Clone(true)); + } + + result.Inputs = inputs; + + return result; + } + else if (uses != 
null) + { + var result = new ActionStep + { + ScopeName = scope?.Value, + ContextName = id?.Value, + ContinueOnError = continueOnError?.Clone(true) as ScalarToken, + DisplayNameToken = name?.Clone(true) as ScalarToken, + Condition = ifCondition, + TimeoutInMinutes = timeoutMinutes?.Clone(true) as ScalarToken, + Inputs = with, + Environment = env, + }; + + if (uses.Value.StartsWith("docker://", StringComparison.Ordinal)) + { + var image = uses.Value.Substring("docker://".Length); + result.Reference = new ContainerRegistryReference { Image = image }; + } + else if (uses.Value.StartsWith("./") || uses.Value.StartsWith(".\\")) + { + result.Reference = new RepositoryPathReference + { + RepositoryType = PipelineConstants.SelfAlias, + Path = uses.Value + }; + } + else + { + var usesSegments = uses.Value.Split('@'); + var pathSegments = usesSegments[0].Split(new[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries); + var gitRef = usesSegments.Length == 2 ? usesSegments[1] : String.Empty; + + if (usesSegments.Length != 2 || + pathSegments.Length < 2 || + String.IsNullOrEmpty(pathSegments[0]) || + String.IsNullOrEmpty(pathSegments[1]) || + String.IsNullOrEmpty(gitRef)) + { + // todo: loc + context.Error(uses, $"Expected format {{org}}/{{repo}}[/path]@ref. Actual '{uses.Value}'"); + } + else + { + var repositoryName = $"{pathSegments[0]}/{pathSegments[1]}"; + var directoryPath = pathSegments.Length > 2 ? String.Join("/", pathSegments.Skip(2)) : String.Empty; + + result.Reference = new RepositoryPathReference + { + RepositoryType = RepositoryTypes.GitHub, + Name = repositoryName, + Ref = gitRef, + Path = directoryPath, + }; + } + } + + return result; + } + else + { + // todo: build a "required" concept into the parser + context.Error(step, $"Either '{PipelineTemplateConstants.Uses}' or '{PipelineTemplateConstants.Run}' is required"); + return null; + } + } + + private static String ConvertToIfCondition( + TemplateContext context, + StringToken ifCondition, + Boolean isJob, + Boolean isDefaultScope) + { + if (String.IsNullOrWhiteSpace(ifCondition?.Value)) + { + return $"{PipelineTemplateConstants.Success}()"; + } + + var condition = ifCondition.Value; + + var expressionParser = new ExpressionParser(); + var functions = default(IFunctionInfo[]); + var namedValues = default(INamedValueInfo[]); + if (isJob) + { + namedValues = s_jobIfNamedValues; + functions = PhaseCondition.FunctionInfo; + } + else + { + namedValues = isDefaultScope ? s_stepNamedValues : s_stepInTemplateNamedValues; + functions = s_stepConditionFunctions; + } + + var node = default(ExpressionNode); + try + { + node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode; + } + catch (Exception ex) + { + context.Error(ifCondition, ex); + return null; + } + + if (node == null) + { + return $"{PipelineTemplateConstants.Success}()"; + } + + var hasStatusFunction = node.Traverse().Any(x => + { + if (x is Function function) + { + return String.Equals(function.Name, PipelineTemplateConstants.Always, StringComparison.OrdinalIgnoreCase) || + String.Equals(function.Name, PipelineTemplateConstants.Cancelled, StringComparison.OrdinalIgnoreCase) || + String.Equals(function.Name, PipelineTemplateConstants.Failure, StringComparison.OrdinalIgnoreCase) || + String.Equals(function.Name, PipelineTemplateConstants.Success, StringComparison.OrdinalIgnoreCase); + } + + return false; + }); + + return hasStatusFunction ? 
condition : $"{PipelineTemplateConstants.Success}() && ({condition})"; + } + + private static readonly INamedValueInfo[] s_jobIfNamedValues = new INamedValueInfo[] + { + new NamedValueInfo(PipelineTemplateConstants.GitHub), + }; + private static readonly INamedValueInfo[] s_stepNamedValues = new INamedValueInfo[] + { + new NamedValueInfo(PipelineTemplateConstants.Strategy), + new NamedValueInfo(PipelineTemplateConstants.Matrix), + new NamedValueInfo(PipelineTemplateConstants.Steps), + new NamedValueInfo(PipelineTemplateConstants.GitHub), + new NamedValueInfo(PipelineTemplateConstants.Job), + new NamedValueInfo(PipelineTemplateConstants.Runner), + new NamedValueInfo(PipelineTemplateConstants.Env), + }; + private static readonly INamedValueInfo[] s_stepInTemplateNamedValues = new INamedValueInfo[] + { + new NamedValueInfo(PipelineTemplateConstants.Strategy), + new NamedValueInfo(PipelineTemplateConstants.Matrix), + new NamedValueInfo(PipelineTemplateConstants.Steps), + new NamedValueInfo(PipelineTemplateConstants.Inputs), + new NamedValueInfo(PipelineTemplateConstants.GitHub), + new NamedValueInfo(PipelineTemplateConstants.Job), + new NamedValueInfo(PipelineTemplateConstants.Runner), + new NamedValueInfo(PipelineTemplateConstants.Env), + }; + private static readonly IFunctionInfo[] s_stepConditionFunctions = new IFunctionInfo[] + { + new FunctionInfo(PipelineTemplateConstants.Always, 0, 0), + new FunctionInfo(PipelineTemplateConstants.Cancelled, 0, 0), + new FunctionInfo(PipelineTemplateConstants.Failure, 0, 0), + new FunctionInfo(PipelineTemplateConstants.Success, 0, 0), + }; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateEvaluator.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateEvaluator.cs new file mode 100644 index 00000000000..fc69ca272fb --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateEvaluator.cs @@ -0,0 +1,526 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Threading; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.DistributedTask.ObjectTemplating; +using GitHub.DistributedTask.ObjectTemplating.Schema; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines.ContextData; +using ExpressionConstants = GitHub.DistributedTask.Expressions2.ExpressionConstants; +using ITraceWriter = GitHub.DistributedTask.ObjectTemplating.ITraceWriter; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class PipelineTemplateEvaluator + { + public PipelineTemplateEvaluator( + ITraceWriter trace, + TemplateSchema schema) + { + if (!String.Equals(schema.Version, PipelineTemplateConstants.Workflow_1_0, StringComparison.Ordinal)) + { + throw new NotSupportedException($"Unexpected template schema version '{schema.Version}'"); + } + + m_trace = trace; + m_schema = schema; + } + + public Int32 MaxDepth => 50; + + /// + /// Gets the maximum error message length before the message will be truncated. + /// + public Int32 MaxErrorMessageLength => 500; + + /// + /// Gets the maximum number of errors that can be recorded when parsing a pipeline. 
+ /// + public Int32 MaxErrors => 10; + + public Int32 MaxEvents => 1000000; // 1 million + + public Int32 MaxResultSize { get; set; } = 10 * 1024 * 1024; // 10 mb + + public StrategyResult EvaluateStrategy( + TemplateToken token, + DictionaryContextData contextData, + String jobFactoryDisplayName) + { + var result = new StrategyResult(); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Strategy, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToStrategy(context, token, jobFactoryDisplayName); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + if (result.Configurations.Count == 0) + { + var configuration = new StrategyConfiguration + { + Name = PipelineConstants.DefaultJobName, + DisplayName = new JobDisplayNameBuilder(jobFactoryDisplayName).Build(), + }; + configuration.ContextData.Add(PipelineTemplateConstants.Matrix, null); + configuration.ContextData.Add( + PipelineTemplateConstants.Strategy, + new DictionaryContextData + { + { + "fail-fast", + new BooleanContextData(result.FailFast) + }, + { + "job-index", + new NumberContextData(0) + }, + { + "job-total", + new NumberContextData(1) + }, + { + "max-parallel", + new NumberContextData(1) + } + }); + result.Configurations.Add(configuration); + } + + return result; + } + + public String EvaluateJobDisplayName( + TemplateToken token, + DictionaryContextData contextData, + String defaultDisplayName) + { + var result = default(String); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StringStrategyContext, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToJobDisplayName(context, token); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return !String.IsNullOrEmpty(result) ? result : defaultDisplayName; + } + + public PhaseTarget EvaluateJobTarget( + TemplateToken token, + DictionaryContextData contextData) + { + var result = default(PhaseTarget); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.RunsOn, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToJobTarget(context, token); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result ?? 
throw new InvalidOperationException("Job target cannot be null"); + } + + public Int32 EvaluateJobTimeout( + TemplateToken token, + DictionaryContextData contextData) + { + var result = default(Int32?); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.NumberStrategyContext, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToJobTimeout(context, token); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result ?? PipelineConstants.DefaultJobTimeoutInMinutes; + } + + public Int32 EvaluateJobCancelTimeout( + TemplateToken token, + DictionaryContextData contextData) + { + var result = default(Int32?); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.NumberStrategyContext, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToJobCancelTimeout(context, token); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result ?? PipelineConstants.DefaultJobCancelTimeoutInMinutes; + } + + public DictionaryContextData EvaluateStepScopeInputs( + TemplateToken token, + DictionaryContextData contextData) + { + var result = default(DictionaryContextData); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsScopeInputs, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = token.ToContextData().AssertDictionary("steps scope inputs"); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result ?? new DictionaryContextData(); + } + + public DictionaryContextData EvaluateStepScopeOutputs( + TemplateToken token, + DictionaryContextData contextData) + { + var result = default(DictionaryContextData); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsScopeOutputs, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = token.ToContextData().AssertDictionary("steps scope outputs"); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result ?? new DictionaryContextData(); + } + + public Boolean EvaluateStepContinueOnError( + TemplateToken token, + DictionaryContextData contextData) + { + var result = default(Boolean?); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.BooleanStepsContext, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToStepContinueOnError(context, token); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result ?? 
false; + } + + public Dictionary EvaluateStepEnvironment( + TemplateToken token, + DictionaryContextData contextData, + StringComparer keyComparer) + { + var result = default(Dictionary); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepEnv, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToStepEnvironment(context, token, keyComparer); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result ?? new Dictionary(keyComparer); + } + + public Dictionary EvaluateStepInputs( + TemplateToken token, + DictionaryContextData contextData) + { + var result = default(Dictionary); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepWith, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToStepInputs(context, token); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result ?? new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + public Int32 EvaluateStepTimeout( + TemplateToken token, + DictionaryContextData contextData) + { + var result = default(Int32?); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.NumberStepsContext, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToStepTimeout(context, token); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result ?? 
0; + } + + public JobContainer EvaluateJobContainer( + TemplateToken token, + DictionaryContextData contextData) + { + var result = default(JobContainer); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Container, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToJobContainer(context, token); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result; + } + + public IList> EvaluateJobServiceContainers( + TemplateToken token, + DictionaryContextData contextData) + { + var result = default(List>); + + if (token != null && token.Type != TokenType.Null) + { + var context = CreateContext(contextData); + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Services, token, 0, null, omitHeader: true); + context.Errors.Check(); + result = PipelineTemplateConverter.ConvertToJobServiceContainers(context, token); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + + return result; + } + + public Boolean TryEvaluateStepDisplayName( + TemplateToken token, + DictionaryContextData contextData, + out String stepName) + { + stepName = default(String); + var context = CreateContext(contextData); + + if (token != null && token.Type != TokenType.Null) + { + // We should only evaluate basic expressions if we are sure we have context on all the Named Values and functions + // Otherwise return and use a default name + if (token is BasicExpressionToken expressionToken) + { + ExpressionNode root = null; + try + { + root = new ExpressionParser().ValidateSyntax(expressionToken.Expression, null) as ExpressionNode; + } + catch (Exception exception) + { + context.Errors.Add(exception); + context.Errors.Check(); + } + foreach (var node in root.Traverse()) + { + if (node is NamedValue namedValue && !contextData.ContainsKey(namedValue.Name)) + { + return false; + } + else if (node is Function function && + !context.ExpressionFunctions.Any(item => String.Equals(item.Name, function.Name)) && + !ExpressionConstants.WellKnownFunctions.ContainsKey(function.Name)) + { + return false; + } + } + } + + try + { + token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StringStepsContext, token, 0, null, omitHeader: true); + context.Errors.Check(); + stepName = PipelineTemplateConverter.ConvertToStepDisplayName(context, token); + } + catch (Exception ex) when (!(ex is TemplateValidationException)) + { + context.Errors.Add(ex); + } + + context.Errors.Check(); + } + return true; + } + + private TemplateContext CreateContext(DictionaryContextData contextData) + { + var result = new TemplateContext + { + CancellationToken = CancellationToken.None, + Errors = new TemplateValidationErrors(MaxErrors, MaxErrorMessageLength), + Memory = new TemplateMemory( + maxDepth: MaxDepth, + maxEvents: MaxEvents, + maxBytes: MaxResultSize), + Schema = m_schema, + TraceWriter = m_trace, + }; + + if (contextData != null) + { + foreach (var pair in contextData) + { + result.ExpressionValues[pair.Key] = pair.Value; + } + } + + // Compat for new agent against old server + foreach (var name in s_contextNames) + { + if (!result.ExpressionValues.ContainsKey(name)) + { + result.ExpressionValues[name] = null; + } + } + + return result; + } + + private 
readonly ITraceWriter m_trace; + private readonly TemplateSchema m_schema; + private readonly String[] s_contextNames = new[] + { + PipelineTemplateConstants.GitHub, + PipelineTemplateConstants.Strategy, + PipelineTemplateConstants.Matrix, + PipelineTemplateConstants.Secrets, + PipelineTemplateConstants.Steps, + PipelineTemplateConstants.Inputs, + PipelineTemplateConstants.Job, + PipelineTemplateConstants.Runner, + PipelineTemplateConstants.Env, + }; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateParser.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateParser.cs new file mode 100644 index 00000000000..62557d38c32 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateParser.cs @@ -0,0 +1,239 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.IO; +using System.Threading; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.ObjectTemplating.Schema; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + using GitHub.DistributedTask.ObjectTemplating; + + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class PipelineTemplateParser + { + static PipelineTemplateParser() + { + var schemaFactory = new PipelineTemplateSchemaFactory(); + s_schema = schemaFactory.CreateSchema(); + } + + public PipelineTemplateParser( + ITraceWriter trace, + ParseOptions options) + { + m_trace = trace ?? throw new ArgumentNullException(nameof(trace)); + m_parseOptions = new ParseOptions(options ?? throw new ArgumentNullException(nameof(options))); + } + + /// + /// Loads the YAML pipeline template + /// + /// Thrown when the entry YAML file does not exist + public PipelineTemplate LoadPipeline( + IFileProvider fileProvider, + RepositoryResource self, + String path, + CancellationToken cancellationToken) + { + fileProvider = fileProvider ?? throw new ArgumentNullException(nameof(fileProvider)); + self = self ?? throw new ArgumentNullException(nameof(self)); + var parseResult = LoadPipelineInternal(fileProvider, path, cancellationToken); + return PipelineTemplateConverter.ConvertToPipeline(parseResult.Context, self, parseResult.Value); + } + + internal ParseResult LoadPipelineInternal( + IFileProvider fileProvider, + String path, + CancellationToken cancellationToken) + { + // Setup the context + var templateLoader = new YamlTemplateLoader(new ParseOptions(m_parseOptions), fileProvider); + var context = new TemplateContext + { + CancellationToken = cancellationToken, + Errors = new TemplateValidationErrors(m_parseOptions.MaxErrors, m_parseOptions.MaxErrorMessageLength), + Memory = new TemplateMemory( + maxDepth: m_parseOptions.MaxDepth, + maxEvents: m_parseOptions.MaxParseEvents, + maxBytes: m_parseOptions.MaxResultSize), + Schema = s_schema, + TraceWriter = m_trace, + }; + + // Load the entry file + var token = default(TemplateToken); + try + { + token = templateLoader.LoadFile(context, null, null, path, PipelineTemplateConstants.WorkflowRoot); + } + catch (Exception ex) + { + context.Errors.Add(ex); + } + + var result = new ParseResult + { + Context = context, + Value = token, + }; + + if (token != null && context.Errors.Count == 0) + { + var templateReferenceCount = ResolveWorkflowTemplateReferences(context, templateLoader, token); + if (templateReferenceCount > 0 && context.Errors.Count == 0) + { + context.TraceWriter.Info(String.Empty); + context.TraceWriter.Info("# "); + context.TraceWriter.Info("# Template resolution complete. 
Final runtime YAML document:"); + context.TraceWriter.Info("# "); + context.TraceWriter.Info("{0}", result.ToYaml()); + } + } + + return result; + } + + private Int32 ResolveWorkflowTemplateReferences( + TemplateContext context, + YamlTemplateLoader templateLoader, + TemplateToken token) + { + var resolvedCount = 0; + var workflow = token.AssertMapping("workflow"); + foreach (var workflowProperty in workflow) + { + var workflowPropertyName = workflowProperty.Key.AssertString("workflow property"); + switch (workflowPropertyName.Value) + { + case PipelineTemplateConstants.Jobs: + resolvedCount += ResolveJobsTemplateReferences(context, templateLoader, workflowProperty.Value); + break; + + case PipelineTemplateConstants.Workflow: + resolvedCount += ResolveJobsTemplateReferences(context, templateLoader, workflowProperty.Value); + break; + } + } + + return resolvedCount; + } + + private Int32 ResolveJobsTemplateReferences( + TemplateContext context, + YamlTemplateLoader templateLoader, + TemplateToken token) + { + var resolvedCount = 0; + var jobs = token.AssertMapping("jobs"); + foreach (var jobsProperty in jobs) + { + var job = jobsProperty.Value.AssertMapping("jobs property value"); + var scopes = new SequenceToken(null, null, null); + foreach (var jobProperty in job) + { + var jobPropertyName = jobProperty.Key.AssertString("job property name"); + switch (jobPropertyName.Value) + { + case PipelineTemplateConstants.Steps: + resolvedCount += ResolveStepsTemplateReferences(context, templateLoader, jobProperty.Value, scopes); + break; + } + } + + if (scopes.Count > 0) + { + var scopesPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Scopes); + job.Add(scopesPropertyName, scopes); + context.Memory.AddBytes(scopesPropertyName); + context.Memory.AddBytes(scopes); // Do not traverse, nested objects already accounted for + } + } + + return resolvedCount; + } + + private Int32 ResolveStepsTemplateReferences( + TemplateContext context, + YamlTemplateLoader templateLoader, + TemplateToken token, + SequenceToken scopes) + { + var resolvedCount = 0; + var steps = token.AssertSequence("steps"); + var stepIndex = 0; + while (stepIndex < steps.Count && context.Errors.Count == 0) + { + var step = steps[stepIndex].AssertMapping("step"); + if (!TemplateReference.TryCreate(step, out var reference)) + { + stepIndex++; + continue; + } + + resolvedCount++; + var template = templateLoader.LoadFile( + context, + reference.TemplatePath.FileId, + reference.TemplateScope, + reference.TemplatePath.Value, + PipelineTemplateConstants.StepsTemplateRoot); + + if (context.Errors.Count != 0) + { + break; + } + + var scope = reference.CreateScope(context, template); + + if (context.Errors.Count != 0) + { + break; + } + + // Remove the template reference and memory overhead + steps.RemoveAt(stepIndex); + context.Memory.SubtractBytes(step, true); // Traverse + + // Remove the template memory overhead + context.Memory.SubtractBytes(template, true); // Traverse + + var templateSteps = GetSteps(template); + if (templateSteps?.Count > 0) + { + // Add the steps from the template + steps.InsertRange(stepIndex, templateSteps); + context.Memory.AddBytes(templateSteps, true); // Traverse + context.Memory.SubtractBytes(templateSteps, false); + + // Add the scope + scopes.Add(scope); + context.Memory.AddBytes(scope, true); // Traverse + } + } + + return resolvedCount; + } + + private SequenceToken GetSteps(TemplateToken template) + { + var mapping = template.AssertMapping("steps template"); + foreach (var property in 
mapping) + { + var propertyName = property.Key.AssertString("steps template property name"); + switch (propertyName.Value) + { + case PipelineTemplateConstants.Steps: + return property.Value.AssertSequence("steps template steps property value"); + } + } + + return null; + } + + private static TemplateSchema s_schema; + private readonly ParseOptions m_parseOptions; + private readonly ITraceWriter m_trace; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateSchemaFactory.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateSchemaFactory.cs new file mode 100644 index 00000000000..55db1ea13f4 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/PipelineTemplateSchemaFactory.cs @@ -0,0 +1,26 @@ +using System; +using System.ComponentModel; +using System.IO; +using System.Reflection; +using GitHub.DistributedTask.ObjectTemplating.Schema; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class PipelineTemplateSchemaFactory + { + public TemplateSchema CreateSchema() + { + var assembly = Assembly.GetExecutingAssembly(); + var json = default(String); + using (var stream = assembly.GetManifestResourceStream("GitHub.DistributedTask.Pipelines.ObjectTemplating.workflow-v1.0.json")) + using (var streamReader = new StreamReader(stream)) + { + json = streamReader.ReadToEnd(); + } + + var objectReader = new JsonObjectReader(null, json); + return TemplateSchema.Load(objectReader); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ReferenceNameBuilder.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ReferenceNameBuilder.cs new file mode 100644 index 00000000000..2b42f29187b --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/ReferenceNameBuilder.cs @@ -0,0 +1,121 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Text; +using GitHub.DistributedTask.Pipelines.Validation; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + internal sealed class ReferenceNameBuilder + { + internal void AppendSegment(String value) + { + if (String.IsNullOrEmpty(value)) + { + return; + } + + if (m_name.Length == 0) + { + var first = value[0]; + if ((first >= 'a' && first <= 'z') || + (first >= 'A' && first <= 'Z') || + first == '_') + { + // Legal first char + } + else if ((first >= '0' && first <= '9') || first == '-') + { + // Illegal first char, but legal char. + // Prepend "_". + m_name.Append("_"); + } + else + { + // Illegal char + } + } + else + { + // Separator + m_name.Append(c_separator); + } + + foreach (var c in value) + { + if ((c >= 'a' && c <= 'z') || + (c >= 'A' && c <= 'Z') || + (c >= '0' && c <= '9') || + c == '_' || + c == '-') + { + // Legal + m_name.Append(c); + } + else + { + // Illegal + m_name.Append("_"); + } + } + } + + internal String Build() + { + var original = m_name.Length > 0 ? 
m_name.ToString() : "job"; + + var attempt = 1; + var suffix = default(String); + while (true) + { + if (attempt == 1) + { + suffix = String.Empty; + } + else if (attempt < 1000) + { + suffix = String.Format(CultureInfo.InvariantCulture, "_{0}", attempt); + } + else + { + throw new InvalidOperationException("Unable to create a unique name"); + } + + var candidate = original.Substring(0, Math.Min(original.Length, PipelineConstants.MaxNodeNameLength - suffix.Length)) + suffix; + + if (m_distinctNames.Add(candidate)) + { + m_name.Clear(); + return candidate; + } + + attempt++; + } + } + + internal Boolean TryAddKnownName( + String value, + out String error) + { + if (!NameValidation.IsValid(value, allowHyphens: true) && value.Length < PipelineConstants.MaxNodeNameLength) + { + error = $"The identifier '{value}' is invalid. IDs may only contain alphanumeric characters, '_', and '-'. IDs must start with a letter or '_' and and must be less than {PipelineConstants.MaxNodeNameLength} characters."; + return false; + } + else if (!m_distinctNames.Add(value)) + { + error = $"The identifier '{value}' may not be used more than once within the same scope."; + return false; + } + else + { + error = null; + return true; + } + } + + private const String c_separator = "_"; + private readonly HashSet m_distinctNames = new HashSet(StringComparer.OrdinalIgnoreCase); + private readonly StringBuilder m_name = new StringBuilder(); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/TaskResultExtensions.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/TaskResultExtensions.cs new file mode 100644 index 00000000000..ab6ab22d8c5 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/TaskResultExtensions.cs @@ -0,0 +1,37 @@ +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + public static class TaskResultExtensions + { + public static PipelineContextData ToContextData(this TaskResult result) + { + switch (result) + { + case TaskResult.Succeeded: + case TaskResult.SucceededWithIssues: + return new StringContextData(PipelineTemplateConstants.Success); + case TaskResult.Failed: + case TaskResult.Abandoned: + return new StringContextData(PipelineTemplateConstants.Failure); + case TaskResult.Canceled: + return new StringContextData(PipelineTemplateConstants.Cancelled); + case TaskResult.Skipped: + return new StringContextData(PipelineTemplateConstants.Skipped); + } + + return null; + } + + public static PipelineContextData ToContextData(this TaskResult? 
result) + { + if (result.HasValue) + { + return result.Value.ToContextData(); + } + + return null; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/TemplateReference.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/TemplateReference.cs new file mode 100644 index 00000000000..170ec23a0f4 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/TemplateReference.cs @@ -0,0 +1,197 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.IO; +using System.Threading; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.ObjectTemplating.Schema; +using GitHub.DistributedTask.Pipelines.ObjectTemplating; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + using GitHub.DistributedTask.ObjectTemplating; + + internal sealed class TemplateReference + { + private TemplateReference( + String scope, + String id, + String generatedId, + StringToken templatePath, + MappingToken inputs) + { + Scope = scope; + TemplatePath = templatePath; + Inputs = inputs; + + if (!String.IsNullOrEmpty(generatedId)) + { + Id = generatedId; + m_isGeneratedId = true; + } + else + { + Id = id; + } + } + + internal String Id { get; } + + internal MappingToken Inputs { get; } + + internal String Scope { get; } + + internal StringToken TemplatePath { get; } + + internal String TemplateScope + { + get + { + return !String.IsNullOrEmpty(Scope) ? $"{Scope}.{Id}" : Id; + } + } + + internal MappingToken CreateScope( + TemplateContext context, + TemplateToken template) + { + var mapping = template.AssertMapping("template file"); + + // Get the inputs and outputs from the template + var inputs = default(MappingToken); + var outputs = default(MappingToken); + foreach (var pair in mapping) + { + var propertyName = pair.Key.AssertString("template file property name"); + switch (propertyName.Value) + { + case PipelineTemplateConstants.Inputs: + inputs = pair.Value.AssertMapping("template file inputs"); + break; + + case PipelineTemplateConstants.Outputs: + if (!m_isGeneratedId) + { + outputs = pair.Value.AssertMapping("template file outputs"); + } + break; + } + } + + // Determine allowed input names + var allowedInputNames = new HashSet(StringComparer.OrdinalIgnoreCase); + if (inputs?.Count > 0) + { + foreach (var pair in inputs) + { + var inputPropertyName = pair.Key.AssertString("template file inputs property"); + allowedInputNames.Add(inputPropertyName.Value); + } + } + + // Validate override inputs names + var overrideInputs = new HashSet(StringComparer.OrdinalIgnoreCase); + var mergedInputs = new MappingToken(null, null, null); + if (Inputs?.Count > 0) + { + foreach (var pair in Inputs) + { + var inputPropertyName = pair.Key.AssertString("template reference inputs property"); + if (!allowedInputNames.Contains(inputPropertyName.Value)) + { + context.Error(inputPropertyName, $"Input '{inputPropertyName.Value}' is not allowed"); + continue; + } + + overrideInputs.Add(inputPropertyName.Value); + mergedInputs.Add(pair.Key, pair.Value); + } + } + + // Merge defaults + if (inputs?.Count > 0) + { + foreach (var pair in inputs) + { + var inputPropertyName = pair.Key.AssertString("template file inputs property"); + if (!overrideInputs.Contains(inputPropertyName.Value)) + { + mergedInputs.Add(pair.Key, pair.Value); + } + } + } + + // Build the scope object + var result = new MappingToken(null, null, null); + var namePropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Name); + var namePropertyValue = new 
StringToken(null, null, null, TemplateScope); + result.Add(namePropertyName, namePropertyValue); + if (mergedInputs.Count > 0) + { + var inputsPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Inputs); + result.Add(inputsPropertyName, mergedInputs); + } + + if (outputs?.Count > 0) + { + var outputsPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Outputs); + result.Add(outputsPropertyName, outputs); + } + + return result; + } + + internal static Boolean TryCreate( + MappingToken mapping, + out TemplateReference reference) + { + var scope = default(String); + var id = default(String); + var generatedId = default(String); + var templatePath = default(StringToken); + var inputs = default(MappingToken); + foreach (var property in mapping) + { + var propertyName = property.Key.AssertString("candidate template reference property name"); + switch (propertyName.Value) + { + case PipelineTemplateConstants.Scope: + var scopeStringToken = property.Value.AssertString("step scope"); + scope = scopeStringToken.Value; + break; + + case PipelineTemplateConstants.Id: + var idStringToken = property.Value.AssertString("step id"); + id = idStringToken.Value; + break; + + case PipelineTemplateConstants.GeneratedId: + var generatedIdStringToken = property.Value.AssertString("step generated id"); + generatedId = generatedIdStringToken.Value; + break; + + case PipelineTemplateConstants.Template: + templatePath = property.Value.AssertString("step template reference"); + break; + + case PipelineTemplateConstants.Inputs: + inputs = property.Value.AssertMapping("step template reference inputs"); + break; + } + } + + if (templatePath != null) + { + reference = new TemplateReference(scope, id, generatedId, templatePath, inputs); + return true; + } + else + { + reference = null; + return false; + } + } + + private Boolean m_isGeneratedId; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/YamlObjectReader.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/YamlObjectReader.cs new file mode 100644 index 00000000000..431cceac003 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/YamlObjectReader.cs @@ -0,0 +1,572 @@ +using System; +using System.Globalization; +using System.IO; +using System.Linq; +using GitHub.DistributedTask.ObjectTemplating; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using YamlDotNet.Core; +using YamlDotNet.Core.Events; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + /// + /// Converts a YAML file into a TemplateToken + /// + internal sealed class YamlObjectReader : IObjectReader + { + internal YamlObjectReader( + Int32? 
fileId, + TextReader input) + { + m_fileId = fileId; + m_parser = new Parser(input); + } + + public Boolean AllowLiteral(out LiteralToken value) + { + if (EvaluateCurrent() is Scalar scalar) + { + // Tag specified + if (!String.IsNullOrEmpty(scalar.Tag)) + { + // String tag + if (String.Equals(scalar.Tag, c_stringTag, StringComparison.Ordinal)) + { + value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value); + MoveNext(); + return true; + } + + // Not plain style + if (scalar.Style != ScalarStyle.Plain) + { + throw new NotSupportedException($"The scalar style '{scalar.Style}' on line {scalar.Start.Line} and column {scalar.Start.Column} is not valid with the tag '{scalar.Tag}'"); + } + + // Boolean, Float, Integer, or Null + switch (scalar.Tag) + { + case c_booleanTag: + value = ParseBoolean(scalar); + break; + case c_floatTag: + value = ParseFloat(scalar); + break; + case c_integerTag: + value = ParseInteger(scalar); + break; + case c_nullTag: + value = ParseNull(scalar); + break; + default: + throw new NotSupportedException($"Unexpected tag '{scalar.Tag}'"); + } + + MoveNext(); + return true; + } + + // Plain style, determine type using YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923 + if (scalar.Style == ScalarStyle.Plain) + { + if (MatchNull(scalar, out var nullToken)) + { + value = nullToken; + } + else if (MatchBoolean(scalar, out var booleanToken)) + { + value = booleanToken; + } + else if (MatchInteger(scalar, out var numberToken) || + MatchFloat(scalar, out numberToken)) + { + value = numberToken; + } + else + { + value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value); + } + + MoveNext(); + return true; + } + + // Otherwise assume string + value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value); + MoveNext(); + return true; + } + + value = default; + return false; + } + + public Boolean AllowSequenceStart(out SequenceToken value) + { + if (EvaluateCurrent() is SequenceStart sequenceStart) + { + value = new SequenceToken(m_fileId, sequenceStart.Start.Line, sequenceStart.Start.Column); + MoveNext(); + return true; + } + + value = default; + return false; + } + + public Boolean AllowSequenceEnd() + { + if (EvaluateCurrent() is SequenceEnd) + { + MoveNext(); + return true; + } + + return false; + } + + public Boolean AllowMappingStart(out MappingToken value) + { + if (EvaluateCurrent() is MappingStart mappingStart) + { + value = new MappingToken(m_fileId, mappingStart.Start.Line, mappingStart.Start.Column); + MoveNext(); + return true; + } + + value = default; + return false; + } + + public Boolean AllowMappingEnd() + { + if (EvaluateCurrent() is MappingEnd) + { + MoveNext(); + return true; + } + + return false; + } + + /// + /// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd. + /// + public void ValidateEnd() + { + if (EvaluateCurrent() is DocumentEnd) + { + MoveNext(); + } + else + { + throw new InvalidOperationException("Expected document end parse event"); + } + + if (EvaluateCurrent() is StreamEnd) + { + MoveNext(); + } + else + { + throw new InvalidOperationException("Expected stream end parse event"); + } + + if (MoveNext()) + { + throw new InvalidOperationException("Expected end of parse events"); + } + } + + /// + /// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart. 
+ /// + public void ValidateStart() + { + if (EvaluateCurrent() != null) + { + throw new InvalidOperationException("Unexpected parser state"); + } + + if (!MoveNext()) + { + throw new InvalidOperationException("Expected a parse event"); + } + + if (EvaluateCurrent() is StreamStart) + { + MoveNext(); + } + else + { + throw new InvalidOperationException("Expected stream start parse event"); + } + + if (EvaluateCurrent() is DocumentStart) + { + MoveNext(); + } + else + { + throw new InvalidOperationException("Expected document start parse event"); + } + } + + private ParsingEvent EvaluateCurrent() + { + if (m_current == null) + { + m_current = m_parser.Current; + if (m_current != null) + { + if (m_current is Scalar scalar) + { + // Verify not using anchors + if (scalar.Anchor != null) + { + throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{scalar.Anchor}'"); + } + } + else if (m_current is MappingStart mappingStart) + { + // Verify not using anchors + if (mappingStart.Anchor != null) + { + throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{mappingStart.Anchor}'"); + } + } + else if (m_current is SequenceStart sequenceStart) + { + // Verify not using anchors + if (sequenceStart.Anchor != null) + { + throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{sequenceStart.Anchor}'"); + } + } + else if (!(m_current is MappingEnd) && + !(m_current is SequenceEnd) && + !(m_current is DocumentStart) && + !(m_current is DocumentEnd) && + !(m_current is StreamStart) && + !(m_current is StreamEnd)) + { + throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}"); + } + } + } + + return m_current; + } + + private Boolean MoveNext() + { + m_current = null; + return m_parser.MoveNext(); + } + + private BooleanToken ParseBoolean(Scalar scalar) + { + if (MatchBoolean(scalar, out var token)) + { + return token; + } + + ThrowInvalidValue(scalar, c_booleanTag); // throws + return default; + } + + private NumberToken ParseFloat(Scalar scalar) + { + if (MatchFloat(scalar, out var token)) + { + return token; + } + + ThrowInvalidValue(scalar, c_floatTag); // throws + return default; + } + + private NumberToken ParseInteger(Scalar scalar) + { + if (MatchInteger(scalar, out var token)) + { + return token; + } + + ThrowInvalidValue(scalar, c_integerTag); // throws + return default; + } + + private NullToken ParseNull(Scalar scalar) + { + if (MatchNull(scalar, out var token)) + { + return token; + } + + ThrowInvalidValue(scalar, c_nullTag); // throws + return default; + } + + private Boolean MatchBoolean( + Scalar scalar, + out BooleanToken value) + { + // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923 + switch (scalar.Value ??
String.Empty) + { + case "true": + case "True": + case "TRUE": + value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, true); + return true; + case "false": + case "False": + case "FALSE": + value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, false); + return true; + } + + value = default; + return false; + } + + private Boolean MatchFloat( + Scalar scalar, + out NumberToken value) + { + // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923 + var str = scalar.Value; + if (!String.IsNullOrEmpty(str)) + { + // Check for [-+]?(\.inf|\.Inf|\.INF)|\.nan|\.NaN|\.NAN + switch (str) + { + case ".inf": + case ".Inf": + case ".INF": + case "+.inf": + case "+.Inf": + case "+.INF": + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.PositiveInfinity); + return true; + case "-.inf": + case "-.Inf": + case "-.INF": + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NegativeInfinity); + return true; + case ".nan": + case ".NaN": + case ".NAN": + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NaN); + return true; + } + + // Otherwise check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)? + + // Skip leading sign + var index = str[0] == '-' || str[0] == '+' ? 1 : 0; + + // Check for integer portion + var length = str.Length; + var hasInteger = false; + while (index < length && str[index] >= '0' && str[index] <= '9') + { + hasInteger = true; + index++; + } + + // Check for decimal point + var hasDot = false; + if (index < length && str[index] == '.') + { + hasDot = true; + index++; + } + + // Check for decimal portion + var hasDecimal = false; + while (index < length && str[index] >= '0' && str[index] <= '9') + { + hasDecimal = true; + index++; + } + + // Check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?) 
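+ // i.e. the mantissa must have digits before the decimal point, or a decimal point followed by at least one digit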
+ if ((hasDot && hasDecimal) || hasInteger) + { + // Check for end + if (index == length) + { + // Try parse + if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var doubleValue)) + { + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue); + return true; + } + // Otherwise exceeds range + else + { + ThrowInvalidValue(scalar, c_floatTag); // throws + } + } + // Check [eE][-+]?[0-9] + else if (index < length && (str[index] == 'e' || str[index] == 'E')) + { + index++; + + // Skip sign + if (index < length && (str[index] == '-' || str[index] == '+')) + { + index++; + } + + // Check for exponent + var hasExponent = false; + while (index < length && str[index] >= '0' && str[index] <= '9') + { + hasExponent = true; + index++; + } + + // Check for end + if (hasExponent && index == length) + { + // Try parse + if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var doubleValue)) + { + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, (Double)doubleValue); + return true; + } + // Otherwise exceeds range + else + { + ThrowInvalidValue(scalar, c_floatTag); // throws + } + } + } + } + } + + value = default; + return false; + } + + private Boolean MatchInteger( + Scalar scalar, + out NumberToken value) + { + // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923 + var str = scalar.Value; + if (!String.IsNullOrEmpty(str)) + { + // Check for [0-9]+ + var firstChar = str[0]; + if (firstChar >= '0' && firstChar <= '9' && + str.Skip(1).All(x => x >= '0' && x <= '9')) + { + // Try parse + if (Double.TryParse(str, NumberStyles.None, CultureInfo.InvariantCulture, out var doubleValue)) + { + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue); + return true; + } + + // Otherwise exceeds range + ThrowInvalidValue(scalar, c_integerTag); // throws + } + // Check for (-|+)[0-9]+ + else if ((firstChar == '-' || firstChar == '+') && + str.Length > 1 && + str.Skip(1).All(x => x >= '0' && x <= '9')) + { + // Try parse + if (Double.TryParse(str, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out var doubleValue)) + { + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue); + return true; + } + + // Otherwise exceeds range + ThrowInvalidValue(scalar, c_integerTag); // throws + } + // Check for 0x[0-9a-fA-F]+ + else if (firstChar == '0' && + str.Length > 2 && + str[1] == 'x' && + str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F'))) + { + // Try parse + if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integerValue)) + { + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue); + return true; + } + + // Otherwise exceeds range + ThrowInvalidValue(scalar, c_integerTag); // throws + } + // Check for 0o[0-9]+ + else if (firstChar == '0' && + str.Length > 2 && + str[1] == 'o' && + str.Skip(2).All(x => x >= '0' && x <= '7')) + { + // Try parse + var integerValue = default(Int32); + try + { + integerValue = Convert.ToInt32(str.Substring(2), 8); + } + // Otherwise exceeds range + catch (Exception) + { + ThrowInvalidValue(scalar, c_integerTag); // throws + } + + value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue); + return true; + } + } + + 
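+ // Not a decimal, hexadecimal, or octal integer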
value = default; + return false; + } + + private Boolean MatchNull( + Scalar scalar, + out NullToken value) + { + // YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923 + switch (scalar.Value ?? String.Empty) + { + case "": + case "null": + case "Null": + case "NULL": + case "~": + value = new NullToken(m_fileId, scalar.Start.Line, scalar.Start.Column); + return true; + } + + value = default; + return false; + } + + private void ThrowInvalidValue( + Scalar scalar, + String tag) + { + throw new NotSupportedException($"The value '{scalar.Value}' on line {scalar.Start.Line} and column {scalar.Start.Column} is invalid for the type '{scalar.Tag}'"); + } + + private const String c_booleanTag = "tag:yaml.org,2002:bool"; + private const String c_floatTag = "tag:yaml.org,2002:float"; + private const String c_integerTag = "tag:yaml.org,2002:int"; + private const String c_nullTag = "tag:yaml.org,2002:null"; + private const String c_stringTag = "tag:yaml.org,2002:string"; + private readonly Int32? m_fileId; + private readonly Parser m_parser; + private ParsingEvent m_current; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/YamlObjectWriter.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/YamlObjectWriter.cs new file mode 100644 index 00000000000..27b92186557 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/YamlObjectWriter.cs @@ -0,0 +1,73 @@ +using System; +using System.Globalization; +using System.IO; +using GitHub.DistributedTask.ObjectTemplating; +using YamlDotNet.Core.Events; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + /// + /// Converts a TemplateToken into YAML + /// + internal sealed class YamlObjectWriter : IObjectWriter + { + internal YamlObjectWriter(StringWriter writer) + { + m_emitter = new YamlDotNet.Core.Emitter(writer); + } + + public void WriteString(String value) + { + m_emitter.Emit(new Scalar(value ?? String.Empty)); + } + + public void WriteBoolean(Boolean value) + { + m_emitter.Emit(new Scalar(value ? 
"true" : "false")); + } + + public void WriteNumber(Double value) + { + m_emitter.Emit(new Scalar(value.ToString("G15", CultureInfo.InvariantCulture))); + } + + public void WriteNull() + { + m_emitter.Emit(new Scalar("null")); + } + + public void WriteSequenceStart() + { + m_emitter.Emit(new SequenceStart(null, null, true, SequenceStyle.Block)); + } + + public void WriteSequenceEnd() + { + m_emitter.Emit(new SequenceEnd()); + } + + public void WriteMappingStart() + { + m_emitter.Emit(new MappingStart()); + } + + public void WriteMappingEnd() + { + m_emitter.Emit(new MappingEnd()); + } + + public void WriteStart() + { + m_emitter.Emit(new StreamStart()); + m_emitter.Emit(new DocumentStart()); + } + + public void WriteEnd() + { + m_emitter.Emit(new DocumentEnd(isImplicit: true)); + m_emitter.Emit(new StreamEnd()); + } + + private readonly YamlDotNet.Core.IEmitter m_emitter; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/YamlTemplateLoader.cs b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/YamlTemplateLoader.cs new file mode 100644 index 00000000000..3d44a299ded --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ObjectTemplating/YamlTemplateLoader.cs @@ -0,0 +1,251 @@ +using System; +using System.Collections.Generic; +using System.IO; +using GitHub.DistributedTask.ObjectTemplating; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ + /// + /// Loads a YAML file, and returns the parsed TemplateToken + /// + internal sealed class YamlTemplateLoader + { + public YamlTemplateLoader( + ParseOptions parseOptions, + IFileProvider fileProvider) + { + m_parseOptions = new ParseOptions(parseOptions); + m_fileProvider = fileProvider ?? throw new ArgumentNullException(nameof(fileProvider)); + } + + public TemplateToken LoadFile( + TemplateContext context, + Int32? rootFileId, + String scope, + String path, + String templateType) + { + if (context.Errors.Count > 0) + { + throw new InvalidOperationException("Expected error count to be 0 when attempting to load a new file"); + } + + // Is entry file? 
+ var isEntryFile = m_referencedFiles.Count == 0; + + // Root the path + path = m_fileProvider.ResolvePath(null, path); + + // Validate max files + m_referencedFiles.Add(path); + if (m_parseOptions.MaxFiles > 0 && m_referencedFiles.Count > m_parseOptions.MaxFiles) + { + throw new InvalidOperationException($"The maximum file count of {m_parseOptions.MaxFiles} has been exceeded"); + } + + // Get the file ID + var fileId = context.GetFileId(path); + + // Check the cache + if (!m_cache.TryGetValue(path, out String fileContent)) + { + // Fetch the file + context.CancellationToken.ThrowIfCancellationRequested(); + fileContent = m_fileProvider.GetFileContent(path); + + // Validate max file size + if (fileContent.Length > m_parseOptions.MaxFileSize) + { + throw new InvalidOperationException($"The maximum file size of {m_parseOptions.MaxFileSize} characters has been exceeded"); + } + + // Cache + m_cache[path] = fileContent; + } + + // Deserialize + var token = default(TemplateToken); + using (var stringReader = new StringReader(fileContent)) + { + var yamlObjectReader = new YamlObjectReader(fileId, stringReader); + token = TemplateReader.Read(context, templateType, yamlObjectReader, fileId, out _); + } + + // Trace + if (!isEntryFile) + { + context.TraceWriter.Info(String.Empty); + } + context.TraceWriter.Info("# "); + context.TraceWriter.Info("# {0}", path); + context.TraceWriter.Info("# "); + + // Validate ref names + if (context.Errors.Count == 0) + { + switch (templateType) + { + case PipelineTemplateConstants.WorkflowRoot: + ValidateWorkflow(context, scope, token); + break; + case PipelineTemplateConstants.StepsTemplateRoot: + var stepsTemplate = token.AssertMapping("steps template"); + foreach (var stepsTemplateProperty in stepsTemplate) + { + var stepsTemplatePropertyName = stepsTemplateProperty.Key.AssertString("steps template property name"); + switch (stepsTemplatePropertyName.Value) + { + case PipelineTemplateConstants.Steps: + ValidateSteps(context, scope, stepsTemplateProperty.Value); + break; + } + } + break; + default: + throw new NotImplementedException($"Unexpected template type '{templateType}' when loading yaml file"); + } + } + + return token; + } + + private void ValidateWorkflow( + TemplateContext context, + String scope, + TemplateToken token) + { + var workflow = token.AssertMapping("workflow"); + foreach (var workflowProperty in workflow) + { + var workflowPropertyName = workflowProperty.Key.AssertString("workflow property name"); + switch (workflowPropertyName.Value) + { + case PipelineTemplateConstants.Jobs: + case PipelineTemplateConstants.Workflow: + var jobs = workflowProperty.Value.AssertMapping("workflow property value"); + foreach (var jobsProperty in jobs) + { + var job = jobsProperty.Value.AssertMapping("jobs property value"); + foreach (var jobProperty in job) + { + var jobPropertyName = jobProperty.Key.AssertString("job property name"); + switch (jobPropertyName.Value) + { + case PipelineTemplateConstants.Steps: + ValidateSteps(context, scope, jobProperty.Value); + break; + } + } + } + break; + } + } + } + + private void ValidateSteps( + TemplateContext context, + String scope, + TemplateToken token) + { + var nameBuilder = new ReferenceNameBuilder(); + var steps = token.AssertSequence("steps"); + var needsReferenceName = new List(); + foreach (var stepsItem in steps) + { + var step = stepsItem.AssertMapping("steps item"); + var isTemplateReference = false; + var hasReferenceName = false; + foreach (var stepProperty in step) + { + var stepPropertyKey = 
stepProperty.Key.AssertString("step property name"); + switch (stepPropertyKey.Value) + { + // Validate reference names + case PipelineTemplateConstants.Id: + var referenceNameLiteral = stepProperty.Value.AssertString("step ID"); + var referenceName = referenceNameLiteral.Value; + if (String.IsNullOrEmpty(referenceName)) + { + continue; + } + + if (!nameBuilder.TryAddKnownName(referenceName, out var error)) + { + context.Error(referenceNameLiteral, error); + } + + hasReferenceName = true; + break; + + case PipelineTemplateConstants.Template: + isTemplateReference = true; + break; + } + } + + // No reference name + if (isTemplateReference && !hasReferenceName) + { + needsReferenceName.Add(step); + } + + // Stamp the scope + if (!String.IsNullOrEmpty(scope)) + { + var scopePropertyName = new StringToken(null, null, null, PipelineTemplateConstants.Scope); + var scopePropertyValue = new StringToken(null, null, null, scope); + step.Add(scopePropertyName, scopePropertyValue); + context.Memory.AddBytes(scopePropertyName); + context.Memory.AddBytes(scopePropertyValue); + } + } + + // Generate reference names + if (needsReferenceName.Count > 0 && context.Errors.Count == 0) + { + foreach (var step in needsReferenceName) + { + // Get the template path + var templatePath = default(String); + foreach (var stepProperty in step) + { + var stepPropertyKey = stepProperty.Key.AssertString("step property name"); + switch (stepPropertyKey.Value) + { + case PipelineTemplateConstants.Template: + var templateStringToken = stepProperty.Value.AssertString("step template path"); + templatePath = templateStringToken.Value; + break; + } + } + + // Generate reference name + if (!String.IsNullOrEmpty(templatePath)) + { + nameBuilder.AppendSegment(templatePath); + var generatedIdPropertyName = new StringToken(null, null, null, PipelineTemplateConstants.GeneratedId); + var generatedIdPropertyValue = new StringToken(null, null, null, nameBuilder.Build()); + step.Add(generatedIdPropertyName, generatedIdPropertyValue); + context.Memory.AddBytes(generatedIdPropertyName); + context.Memory.AddBytes(generatedIdPropertyValue); + } + } + } + } + + /// + /// Cache of file content + /// + private readonly Dictionary m_cache = new Dictionary(StringComparer.OrdinalIgnoreCase); + + private readonly IFileProvider m_fileProvider; + + private readonly ParseOptions m_parseOptions; + + /// + /// Tracks unique file references + /// + private readonly HashSet m_referencedFiles = new HashSet(StringComparer.OrdinalIgnoreCase); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PackageStore.cs b/src/Sdk/DTPipelines/Pipelines/PackageStore.cs new file mode 100644 index 00000000000..20fc8704a1c --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PackageStore.cs @@ -0,0 +1,51 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + public interface IPackageResolver + { + IList GetPackages(String packageType); + } + + public class PackageStore : IPackageStore + { + public PackageStore(params PackageMetadata[] packages) + : this(packages, null) + { + } + + public PackageStore( + IEnumerable packages = null, + IPackageResolver resolver = null) + { + this.Resolver = resolver; + m_packages = packages?.GroupBy(x => x.Type).ToDictionary(x => x.Key, x => x.ToList(), StringComparer.OrdinalIgnoreCase) ?? 
+ new Dictionary>(StringComparer.OrdinalIgnoreCase); + } + + public IPackageResolver Resolver + { + get; + } + + public PackageVersion GetLatestVersion(String packageType) + { + if (!m_packages.TryGetValue(packageType, out var existingPackages)) + { + var resolvedPackages = this.Resolver?.GetPackages(packageType); + if (resolvedPackages?.Count > 0) + { + existingPackages = resolvedPackages.ToList(); + m_packages[packageType] = existingPackages; + } + } + + return existingPackages?.OrderByDescending(x => x.Version).Select(x => x.Version).FirstOrDefault(); + } + + private Dictionary> m_packages; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ParallelExecutionOptions.cs b/src/Sdk/DTPipelines/Pipelines/ParallelExecutionOptions.cs new file mode 100644 index 00000000000..5bd5be2c4ef --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ParallelExecutionOptions.cs @@ -0,0 +1,315 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.WebApi; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class ParallelExecutionOptions + { + public ParallelExecutionOptions() + { + } + + private ParallelExecutionOptions(ParallelExecutionOptions optionsToCopy) + { + this.Matrix = optionsToCopy.Matrix; + this.MaxConcurrency = optionsToCopy.MaxConcurrency; + } + + [DataMember(EmitDefaultValue = false)] + [JsonConverter(typeof(ExpressionValueJsonConverter>>))] + public ExpressionValue>> Matrix + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + [JsonConverter(typeof(ExpressionValueJsonConverter))] + public ExpressionValue MaxConcurrency + { + get; + set; + } + + public ParallelExecutionOptions Clone() + { + return new ParallelExecutionOptions(this); + } + + internal JobExecutionContext CreateJobContext( + PhaseExecutionContext context, + String jobName, + Int32 attempt, + ExpressionValue container, + IDictionary> sidecarContainers, + Boolean continueOnError, + Int32 timeoutInMinutes, + Int32 cancelTimeoutInMinutes, + IJobFactory jobFactory) + { + // perform regular expansion with a filter + var options = new JobExpansionOptions(jobName, attempt); + + return GenerateJobContexts( + context, + container, + sidecarContainers, + continueOnError, + timeoutInMinutes, + cancelTimeoutInMinutes, + jobFactory, + options) + .FirstOrDefault(); + } + + internal ExpandPhaseResult Expand( + PhaseExecutionContext context, + ExpressionValue container, + IDictionary> sidecarContainers, + Boolean continueOnError, + Int32 timeoutInMinutes, + Int32 cancelTimeoutInMinutes, + IJobFactory jobFactory, + JobExpansionOptions options) + { + var jobContexts = GenerateJobContexts( + context, + container, + sidecarContainers, + continueOnError, + timeoutInMinutes, + cancelTimeoutInMinutes, + jobFactory, + options); + + var result = new ExpandPhaseResult(); + foreach (var c in jobContexts) + { + result.Jobs.Add(c.Job); + } + + // parse MaxConcurrency request + var numberOfJobs = jobContexts.Count; + var userProvidedValue = context.Evaluate( + name: nameof(MaxConcurrency), + expression: this.MaxConcurrency, + defaultValue: 0).Value; + + // setting max to 0 or less is shorthand for "unlimited" + if (userProvidedValue <= 0) + { + userProvidedValue = numberOfJobs; + } + + result.MaxConcurrency = userProvidedValue; + return result; + } + + internal IList GenerateJobContexts( + 
PhaseExecutionContext context, + ExpressionValue container, + IDictionary> sidecarContainers, + Boolean continueOnError, + Int32 timeoutInMinutes, + Int32 cancelTimeoutInMinutes, + IJobFactory jobFactory, + JobExpansionOptions options) + { + // We don't want job variables to be set into the phase context so we create a child context for each unique configuration + var jobContexts = new List(); + void GenerateContext( + String displayName, + String configuration, + IDictionary configurationVariables = null, + String parallelExecutionType = null, + Int32 positionInPhase = 1, + Int32 totalJobsInPhase = 1) + { + // configurations should (eventually) follow configuration naming conventions + if (String.IsNullOrEmpty(configuration)) + { + configuration = PipelineConstants.DefaultJobName; + } + + // Determine attempt number. + // if we have a custom value, it wins. + // if we have previously attempted this configuration, + // the new attempt number is one greater than the previous. + // 1 is the minimum attempt number + var attemptNumber = options?.GetAttemptNumber(configuration) ?? -1; + if (attemptNumber < 1) + { + var previousAttempt = context.PreviousAttempt; + if (previousAttempt != null) + { + var jobInstance = context.PreviousAttempt?.Jobs.FirstOrDefault(x => x.Job.Name.Equals(configuration, StringComparison.OrdinalIgnoreCase)); + if (jobInstance != null) + { + attemptNumber = jobInstance.Job.Attempt + 1; + } + } + } + + if (attemptNumber < 1) + { + attemptNumber = 1; + } + + var jobContext = context.CreateJobContext( + name: configuration, + attempt: attemptNumber, + positionInPhase, + totalJobsInPhase); + + // add parallel execution type + if (parallelExecutionType != null) + { + jobContext.SetSystemVariables(new List + { + new Variable + { + Name = WellKnownDistributedTaskVariables.ParallelExecutionType, + Value = parallelExecutionType + } + }); + } + + if (configurationVariables != null) + { + jobContext.SetUserVariables(configurationVariables); + } + + // create job model from factory + jobContext.Job.Definition = jobFactory.CreateJob( + jobContext, + container, + sidecarContainers, + continueOnError, + timeoutInMinutes, + cancelTimeoutInMinutes, + displayName); + + jobContexts.Add(jobContext); + + if (jobContexts.Count > context.ExecutionOptions.MaxJobExpansion) + { + // Note: this is a little weird: it might be that the max concurrency is greater than the max expansion, + // but we only throw if we actually try to generate more jobs than the max expansion. + throw new MaxJobExpansionException(PipelineStrings.PhaseJobSlicingExpansionExceedLimit(jobContexts.Count.ToString(), context.ExecutionOptions.MaxJobExpansion)); + } + } + + if (this.Matrix != null) + { + var matrixValue = context.Evaluate(nameof(Matrix), this.Matrix, null, traceDefault: false).Value; + var numberOfConfigurations = matrixValue?.Count ?? 
0; + if (numberOfConfigurations > 0) + { + var positionInPhase = 1; + foreach (var pair in matrixValue) + { + // user-provided configuration key + var configuration = pair.Key; + var refName = configuration; + if (!PipelineUtilities.IsLegalNodeName(refName)) + { + var legalNodeName = PipelineConstants.DefaultJobDisplayName + positionInPhase.ToString(); + context.Trace?.Info($"\"{refName}\" is not a legal node name; node will be named \"{legalNodeName}\"."); + if (context.ExecutionOptions.EnforceLegalNodeNames) + { + refName = legalNodeName; + } + } + + if (options == null || options.IsIncluded(refName)) + { + GenerateContext( + displayName: Phase.GenerateDisplayName(context.Phase.Definition, configuration), + configuration: refName, + configurationVariables: pair.Value, + parallelExecutionType: "MultiConfiguration", + positionInPhase: positionInPhase, + totalJobsInPhase: numberOfConfigurations); + } + + ++positionInPhase; + } + } + } + else if (this.MaxConcurrency is var maxConcurrencyPipelineValue && maxConcurrencyPipelineValue != null) + { + var maxConcurrency = context.Evaluate(nameof(maxConcurrencyPipelineValue), maxConcurrencyPipelineValue, 1).Value; + + //If the value of context.ExecutionOptions.MaxParallelism is set, we will enforce MaxConcurrency value to be not more than context.ExecutionOptions.MaxParallelism. + //context.ExecutionOptions.MaxParallelism is currently set if the current context is hosted and public, especially to avoid abuse of services. + if (maxConcurrency > context.ExecutionOptions.MaxParallelism) + { + maxConcurrency = context.ExecutionOptions.MaxParallelism.Value; + } + + if (maxConcurrency > 1) + { + if (options == null || options.Configurations == null || options.Configurations.Count == 0) + { + // generate all slices + for (var positionInPhase = 1; positionInPhase <= maxConcurrency; ++positionInPhase) + { + // NOTE: for historical reasons, the reference name of a slice is "Job" plus the slice number: "Job1" + var positionInPhaseString = positionInPhase.ToString(); + GenerateContext( + displayName: Phase.GenerateDisplayName(context.Phase.Definition, positionInPhaseString), + configuration: PipelineConstants.DefaultJobDisplayName + positionInPhaseString, + configurationVariables: null, + parallelExecutionType: "MultiMachine", + positionInPhase: positionInPhase, + totalJobsInPhase: maxConcurrency); + } + } + else + { + // generate only the requested slices + foreach (var configuration in options.Configurations.Keys) + { + // determine which slices are required by parsing the configuration name (see generation code above) + var prefix = PipelineConstants.DefaultJobDisplayName; + if (!configuration.StartsWith(prefix, StringComparison.OrdinalIgnoreCase) + || !int.TryParse(configuration.Substring(prefix.Length), out var positionInPhase)) + throw new PipelineValidationException(PipelineStrings.PipelineNotValid()); + + GenerateContext( + displayName: Phase.GenerateDisplayName(context.Phase.Definition, positionInPhase.ToString()), + configuration: configuration, + configurationVariables: null, + parallelExecutionType: "MultiMachine", + positionInPhase: positionInPhase, + totalJobsInPhase: maxConcurrency); + } + } + } + } + + // if no contexts are produced otherwise, create a default context. 
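+ // (reached when the matrix and max-concurrency branches above did not generate any job)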
+ if (jobContexts.Count == 0) + { + var configuration = PipelineConstants.DefaultJobName; + if (options == null || options.IsIncluded(configuration)) + { + // the default display name is just the JobFactory display name + GenerateContext( + displayName: Phase.GenerateDisplayName(context.Phase.Definition), + configuration: configuration); + } + } + + return jobContexts; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Phase.cs b/src/Sdk/DTPipelines/Pipelines/Phase.cs new file mode 100644 index 00000000000..2f44e4afcca --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Phase.cs @@ -0,0 +1,1677 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.ComponentModel; +using System.Diagnostics; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.Pipelines.Validation; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class Phase : PhaseNode, IJobFactory + { + public Phase() + { + } + + private Phase(Phase phaseToCopy) + : base(phaseToCopy) + { + if (phaseToCopy.m_steps != null && phaseToCopy.m_steps.Count > 0) + { + m_steps = new List(phaseToCopy.m_steps.Select(x => x.Clone())); + } + } + + /// + /// Gets the phase type. + /// + [DataMember(EmitDefaultValue = false)] + public override PhaseType Type => PhaseType.Phase; + + /// + /// Gets the list of steps associated with this phase. At runtime the steps will be used as a template for + /// the execution of a job. + /// + public IList Steps + { + get + { + if (m_steps == null) + { + m_steps = new List(); + } + return m_steps; + } + } + + /// + /// Creates the specified job using the provided execution context and name. A new execution context is + /// returned which includes new variables set by the job. + /// + /// The execution context + /// The name of the job which should be created + /// A job and execution context if the specified name exists; otherwise, null + public JobExecutionContext CreateJobContext( + PhaseExecutionContext context, + String name, + Int32 attempt) + { + ArgumentUtility.CheckForNull(this.Target, nameof(this.Target)); + + // Create a copy of the context so the same root context may be used to create multiple jobs + // without impacting the input context. + return this.Target.CreateJobContext(context, name, attempt, this); + } + + /// + /// Creates a job context using the provided phase context and existing job instance. A new context is + /// returned which includes new variables set by the job. 
+ /// + /// The execution context + /// The existing job instance + /// A job execution context + public JobExecutionContext CreateJobContext( + PhaseExecutionContext context, + JobInstance jobInstance) + { + var jobContext = context.CreateJobContext(jobInstance); + jobContext.Job.Definition.Id = jobContext.GetInstanceId(); + + var options = new BuildOptions(); + var builder = new PipelineBuilder(context); + var result = builder.GetReferenceResources(jobInstance.Definition.Steps.OfType().ToList(), jobInstance.Definition.Target); + jobContext.ReferencedResources.MergeWith(result); + + // Update the execution context with referenced job containers + var containerAlias = (jobInstance.Definition.Container as DistributedTask.ObjectTemplating.Tokens.StringToken)?.Value; + if (!String.IsNullOrEmpty(containerAlias)) + { + UpdateJobContextReferencedContainers(jobContext, containerAlias); + } + var sidecarContainers = jobInstance.Definition.SidecarContainers; + if (sidecarContainers != null) + { + foreach (var sidecar in sidecarContainers) + { + // Sidecar is serviceName -> containerAlias, e.g. nginx: containerAlias + UpdateJobContextReferencedContainers(jobContext, sidecar.Value); + } + } + // Update the execution context with the job-specific system variables + UpdateJobContextVariablesFromJob(jobContext, jobInstance.Definition); + + return jobContext; + } + + /// + /// Expands the template using the provided execution context and returns the list of jobs. + /// + /// The execution context + /// The expansion options to use + /// A list of jobs which should be executed for this phase + public ExpandPhaseResult Expand( + PhaseExecutionContext context, + JobExpansionOptions options = null) + { + ArgumentUtility.CheckForNull(this.Target, nameof(this.Target)); + + var result = this.Target.Expand(context, this, options); + if (result != null) + { + var runtimeValue = this.ContinueOnError?.GetValue(context); + result.ContinueOnError = runtimeValue?.Value ??
false; + } + + return result; + } + + internal static String GetErrorMessage( + String code, + params Object[] values) + { + var stageName = (String)values[0]; + if (String.IsNullOrEmpty(stageName) || + stageName.Equals(PipelineConstants.DefaultJobName, StringComparison.OrdinalIgnoreCase)) + { + switch (code) + { + case PipelineConstants.NameInvalid: + return PipelineStrings.PhaseNameInvalid(values[1]); + + case PipelineConstants.NameNotUnique: + return PipelineStrings.PhaseNamesMustBeUnique(values[1]); + + case PipelineConstants.StartingPointNotFound: + return PipelineStrings.PipelineNotValidNoStartingPhase(); + + case PipelineConstants.DependencyNotFound: + return PipelineStrings.PhaseDependencyNotFound(values[1], values[2]); + + case PipelineConstants.GraphContainsCycle: + return PipelineStrings.PhaseGraphCycleDetected(values[1], values[2]); + } + } + else + { + switch (code) + { + case PipelineConstants.NameInvalid: + return PipelineStrings.StagePhaseNameInvalid(values[0], values[1]); + + case PipelineConstants.NameNotUnique: + return PipelineStrings.StagePhaseNamesMustBeUnique(values[0], values[1]); + + case PipelineConstants.StartingPointNotFound: + return PipelineStrings.StageNotValidNoStartingPhase(values[0]); + + case PipelineConstants.DependencyNotFound: + return PipelineStrings.StagePhaseDependencyNotFound(values[0], values[1], values[2]); + + case PipelineConstants.GraphContainsCycle: + return PipelineStrings.StagePhaseGraphCycleDetected(values[0], values[1], values[2]); + } + } + + throw new NotSupportedException(); + } + + /// + /// Resolves external references and ensures the steps are compatible with the selected target. + /// + /// The validation context + public override void Validate( + PipelineBuildContext context, + ValidationResult result) + { + base.Validate(context, result); + + StepValidationResult phaseStepValidationResult = new StepValidationResult(); + // Require the latest agent version. + if (context.BuildOptions.DemandLatestAgent) + { + var latestPackageVersion = context.PackageStore?.GetLatestVersion(WellKnownPackageTypes.Agent); + if (latestPackageVersion == null) + { + throw new NotSupportedException("Unable to determine the latest agent package version"); + } + + phaseStepValidationResult.MinAgentVersion = latestPackageVersion.ToString(); + } + + if (context.EnvironmentVersion < 2) + { + // environment version 1 should have at most 1 checkout step; the checkout task might not be the first step if there is an Azure Key Vault task + var checkoutStep = this.Steps.SingleOrDefault(x => x.IsCheckoutTask()); + if (checkoutStep != null) + { + if ((checkoutStep as TaskStep).Inputs[PipelineConstants.CheckoutTaskInputs.Repository] == PipelineConstants.NoneAlias) + { + this.Variables.Add(new Variable() { Name = "agent.source.skip", Value = Boolean.TrueString }); + } + + this.Steps.Remove(checkoutStep); + } + } + + ValidateSteps(context, this, Target, result, this.Steps, phaseStepValidationResult); + + // Resolve the target to ensure we have stable identifiers for the orchestration engine + // phase targets with expressions need to be evaluated against resolved job contexts.
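+ // Pool and server targets, and literal agent queue targets, can be validated now; queue targets containing expressions are deferred until job contexts are resolved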
+ bool validateTarget = false; + if (this.Target.Type == PhaseTargetType.Pool || this.Target.Type == PhaseTargetType.Server) + { + validateTarget = true; + } + else if (this.Target is AgentQueueTarget agentQueueTarget && agentQueueTarget.IsLiteral()) + { + validateTarget = true; + } + + if (validateTarget) + { + this.Target.Validate( + context, + context.BuildOptions, + result, + this.Steps, + phaseStepValidationResult.TaskDemands); + } + } + + // todo: merge JobFactory.cs and Phase.cs and then make this private + internal static void ValidateSteps( + PipelineBuildContext context, + PhaseNode phase, + PhaseTarget phaseTarget, + ValidationResult result, + IList steps, + StepValidationResult phaseStepValidationResult) + { + var stepsCopy = new List(); + foreach (var step in steps) + { + // Skip if not enabled on the definition. + if (!step.Enabled) + { + continue; + } + + if (step.Type == StepType.Task) + { + var taskErrors = ValidateTaskStep(context, phase, phaseTarget, result.ReferencedResources, result.UnauthorizedResources, (step as TaskStep), phaseStepValidationResult); + if (taskErrors.Count == 0) + { + stepsCopy.Add(step); + } + else + { + result.Errors.AddRange(taskErrors); + } + } + else if (step.Type == StepType.Group) + { + var groupErrors = ValidateGroupStep(context, phase, phaseTarget, result.ReferencedResources, result.UnauthorizedResources, (step as GroupStep), phaseStepValidationResult); + if (groupErrors.Count == 0) + { + stepsCopy.Add(step); + } + else + { + result.Errors.AddRange(groupErrors); + } + } + else if (step.Type == StepType.Action) + { + var actionErrors = ValidateActionStep(context, phase, step as ActionStep, phaseStepValidationResult); + if (actionErrors.Count == 0) + { + stepsCopy.Add(step); + } + else + { + result.Errors.AddRange(actionErrors); + } + } + else + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.StepNotSupported())); + } + } + + // Now replace the steps list with our updated list based on disabled/missing tasks + steps.Clear(); + steps.AddRange(stepsCopy); + + // Now go through any tasks which did not have a name specified and name them according to how many + // of that specific task is present. 
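+ // e.g. a single unnamed task defaulting to 'MyTask' keeps that name, while two such tasks become 'MyTask1' and 'MyTask2'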
if (phaseStepValidationResult.UnnamedSteps.Count > 0) + { + GenerateDefaultTaskNames(phaseStepValidationResult.KnownNames, phaseStepValidationResult.UnnamedSteps); + } + + // Make sure our computed minimum agent version is included with the task demands + if (phaseStepValidationResult.MinAgentVersion != null) + { + phaseStepValidationResult.TaskDemands.Add(new DemandMinimumVersion(PipelineConstants.AgentVersionDemandName, phaseStepValidationResult.MinAgentVersion)); + } + } + + private static List ValidateActionStep( + PipelineBuildContext context, + PhaseNode phase, + ActionStep actionStep, + StepValidationResult stepValidationResult) + { + List actionErrors = new List(); + + // We need an action reference to a container image or repository + if (actionStep.Reference == null) + { + // Stop checking further since we can't even find an action definition + actionErrors.Add(new PipelineValidationError(PipelineStrings.StepActionReferenceInvalid(phase.Name, actionStep.Name))); + return actionErrors; + } + + string defaultActionName = ""; + if (actionStep.Reference.Type == ActionSourceType.ContainerRegistry) + { + // action is reference to an image from container registry + var containerAction = actionStep.Reference as ContainerRegistryReference; + defaultActionName = NameValidation.Sanitize(containerAction.Image, context.BuildOptions.AllowHyphenNames); + } + else if (actionStep.Reference.Type == ActionSourceType.Repository) + { + // action is reference to dockerfile or action.js from a git repository + var repoAction = actionStep.Reference as RepositoryPathReference; + defaultActionName = NameValidation.Sanitize(repoAction.Name ?? PipelineConstants.SelfAlias, context.BuildOptions.AllowHyphenNames); + } + else if (actionStep.Reference.Type == ActionSourceType.Script) + { + defaultActionName = "run"; + } + else + { + actionErrors.Add(new PipelineValidationError(PipelineStrings.TaskStepReferenceInvalid(phase.Name, actionStep.Name, actionStep.Reference.Type))); + } + + // Validate task name + var stepNameError = ValidateStepName(context, phase, stepValidationResult, actionStep, defaultActionName); + if (stepNameError != null) + { + actionErrors.Add(stepNameError); + } + + return actionErrors; + } + + private static List ValidateTaskStep( + PipelineBuildContext context, + PhaseNode phase, + PhaseTarget phaseTarget, + PipelineResources referencedResources, + PipelineResources unauthorizedResources, + TaskStep taskStep, + StepValidationResult stepValidationResult) + { + List taskErrors = new List(); + + // We need either a task name or an identifier and a version.
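+ // i.e. the reference must carry a version and either a non-empty id or a non-empty name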
if (taskStep.Reference == null || + taskStep.Reference.Version == null || + (taskStep.Reference.Id == Guid.Empty && String.IsNullOrEmpty(taskStep.Reference.Name))) + { + // Stop checking further since we can't even resolve task definition + taskErrors.Add(new PipelineValidationError(PipelineStrings.StepTaskReferenceInvalid(phase.Name, taskStep.Name))); + return taskErrors; + } + + // Try to resolve by the identifier first, then by name + TaskDefinition resolvedTask = null; + try + { + if (taskStep.Reference.Id != Guid.Empty) + { + resolvedTask = context.TaskStore?.ResolveTask(taskStep.Reference.Id, taskStep.Reference.Version); + } + else if (!String.IsNullOrEmpty(taskStep.Reference.Name)) + { + resolvedTask = context.TaskStore?.ResolveTask(taskStep.Reference.Name, taskStep.Reference.Version); + } + } + catch (AmbiguousTaskSpecificationException ex) + { + // Stop checking further since we can't even resolve task definition + taskErrors.Add(new PipelineValidationError(PipelineStrings.TaskStepReferenceInvalid(phase.Name, taskStep.Name, ex.Message))); + return taskErrors; + } + + // Make sure we were able to find the task with the provided reference data + if (resolvedTask == null || resolvedTask.Disabled) + { + // Stop checking further since we can't even resolve task definition + String name = taskStep.Reference.Id != Guid.Empty ? taskStep.Reference.Id.ToString() : taskStep.Reference.Name; + taskErrors.Add(new PipelineValidationError(PipelineStrings.TaskMissing(phase.Name, taskStep.Name, name, taskStep.Reference.Version))); + return taskErrors; + } + + // Make sure this step is compatible with the target used by this phase + if (phaseTarget.IsValid(resolvedTask) == false) + { + // Stop checking further since the task is not valid for the target + taskErrors.Add(new PipelineValidationError(PipelineStrings.TaskInvalidForGivenTarget(phase.Name, taskStep.Name, resolvedTask.Name, resolvedTask.Version))); + return taskErrors; + } + + // Resolve the task version to pin a given task for the duration of the plan + taskStep.Reference.Id = resolvedTask.Id; + taskStep.Reference.Name = resolvedTask.Name; + taskStep.Reference.Version = resolvedTask.Version; + + // Make sure that we have valid syntax for a condition statement + var conditionError = ValidateStepCondition(context, phase, taskStep.Name, taskStep.Condition); + if (conditionError != null) + { + taskErrors.Add(conditionError); + } + + // Resolves values from inputs based on the provided validation options + var inputErrors = ResolveInputs(context, phase, referencedResources, unauthorizedResources, taskStep, resolvedTask); + if (inputErrors.Count > 0) + { + taskErrors.AddRange(inputErrors); + } + + // Task names do not have to correspond to the same rules as reference names, so we need to remove + // any characters which are considered invalid for a reference name from the task definition name.
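+ // The sanitized name is used as the default step name when the author did not specify one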
var defaultTaskName = NameValidation.Sanitize(taskStep.Reference.Name, context.BuildOptions.AllowHyphenNames); + + // Validate task name + var stepNameError = ValidateStepName(context, phase, stepValidationResult, taskStep, defaultTaskName); + if (stepNameError != null) + { + taskErrors.Add(stepNameError); + } + + // Now union any demands which are satisfied by tasks within the job + stepValidationResult.TasksSatisfy.UnionWith(resolvedTask.Satisfies); + + stepValidationResult.MinAgentVersion = resolvedTask.GetMinimumAgentVersion(stepValidationResult.MinAgentVersion); + + // Add demands from task + var unsatisfiedDemands = resolvedTask.Demands.Where(d => !stepValidationResult.TasksSatisfy.Contains(d.Name)); + if (unsatisfiedDemands.Any()) + { + stepValidationResult.TaskDemands.UnionWith(unsatisfiedDemands); + } + + return taskErrors; + } + + private static List ValidateGroupStep( + PipelineBuildContext context, + PhaseNode phase, + PhaseTarget phaseTarget, + PipelineResources referencedResources, + PipelineResources unauthorizedResources, + GroupStep groupStep, + StepValidationResult stepValidationResult) + { + List groupErrors = new List(); + + // Make sure that we have valid syntax for a condition statement + var conditionError = ValidateStepCondition(context, phase, groupStep.Name, groupStep.Condition); + if (conditionError != null) + { + groupErrors.Add(conditionError); + } + + // ValidationResult for steps within group, since only steps within a group need to have unique task.name + StepValidationResult groupStepsValidationResult = new StepValidationResult(); + + var stepsCopy = new List(); + foreach (var step in groupStep.Steps) + { + // Skip if not enabled on the definition. + if (!step.Enabled) + { + continue; + } + + var taskErrors = ValidateTaskStep(context, phase, phaseTarget, referencedResources, unauthorizedResources, step, groupStepsValidationResult); + if (taskErrors.Count == 0) + { + stepsCopy.Add(step); + } + else + { + groupErrors.AddRange(taskErrors); + } + } + + // Now replace the steps list with our updated list based on disabled/missing tasks + groupStep.Steps.Clear(); + groupStep.Steps.AddRange(stepsCopy); + + // Merge group steps validation result + if (groupStepsValidationResult.UnnamedSteps.Count > 0) + { + // Now go through any tasks within a group which did not have a name specified and name them according to how many + // of that specific task is present.
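+ // Step names only need to be unique within the group, so the group's own known-name set is used here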
+ GenerateDefaultTaskNames(groupStepsValidationResult.KnownNames, groupStepsValidationResult.UnnamedSteps); + } + + // If group min agent version > current min agent version + if (DemandMinimumVersion.CompareVersion(groupStepsValidationResult.MinAgentVersion, stepValidationResult.MinAgentVersion) > 0) + { + stepValidationResult.MinAgentVersion = groupStepsValidationResult.MinAgentVersion; + } + + // Add tasks satisfies provided by the group + stepValidationResult.TasksSatisfy.UnionWith(groupStepsValidationResult.TasksSatisfy); + + // Add demands come from tasks within the group + var unsatisfiedDemands = groupStepsValidationResult.TaskDemands.Where(d => !stepValidationResult.TasksSatisfy.Contains(d.Name)); + if (unsatisfiedDemands.Any()) + { + stepValidationResult.TaskDemands.UnionWith(unsatisfiedDemands); + } + + // Validate group name + var stepNameError = ValidateStepName(context, phase, stepValidationResult, groupStep, "group"); + if (stepNameError != null) + { + groupErrors.Add(stepNameError); + } + + return groupErrors; + } + + private static PipelineValidationError ValidateStepName( + PipelineBuildContext context, + PhaseNode phase, + StepValidationResult stepValidationResult, + JobStep step, + String defaultName) + { + if (String.IsNullOrEmpty(step.Name)) + { + List stepsToName; + if (!stepValidationResult.UnnamedSteps.TryGetValue(defaultName, out stepsToName)) + { + stepsToName = new List(); + stepValidationResult.UnnamedSteps.Add(defaultName, stepsToName); + } + + stepsToName.Add(step); + + if (String.IsNullOrEmpty(step.DisplayName)) + { + step.DisplayName = defaultName; + } + } + else + { + bool nameIsValid = NameValidation.IsValid(step.Name, context.BuildOptions.AllowHyphenNames); + if (!nameIsValid) + { + if (context.BuildOptions.ValidateStepNames) + { + return new PipelineValidationError(PipelineStrings.StepNameInvalid(phase.Name, step.Name)); + } + else + { + var sanitizedName = NameValidation.Sanitize(step.Name, context.BuildOptions.AllowHyphenNames); + if (String.IsNullOrEmpty(sanitizedName)) + { + sanitizedName = defaultName; + } + + step.Name = sanitizedName; + nameIsValid = true; + } + } + + if (nameIsValid && !stepValidationResult.KnownNames.Add(step.Name)) + { + if (context.BuildOptions.ValidateStepNames) + { + return new PipelineValidationError(PipelineStrings.StepNamesMustBeUnique(phase.Name, step.Name)); + } + else + { + List stepsToName; + if (!stepValidationResult.UnnamedSteps.TryGetValue(step.Name, out stepsToName)) + { + stepsToName = new List(); + stepValidationResult.UnnamedSteps.Add(step.Name, stepsToName); + } + + stepsToName.Add(step); + } + } + + // If the name was specified but the display name is empty, default the display name to the name + if (String.IsNullOrEmpty(step.DisplayName)) + { + step.DisplayName = step.Name; + } + } + + return null; + } + + private static PipelineValidationError ValidateStepCondition( + PipelineBuildContext context, + PhaseNode phase, + String stepName, + String stepCondition) + { + if (!String.IsNullOrEmpty(stepCondition)) + { + try + { + var parser = new DistributedTask.Expressions.ExpressionParser(); + parser.ValidateSyntax(stepCondition, null); + } + catch (ParseException ex) + { + return new PipelineValidationError(PipelineStrings.StepConditionIsNotValid(phase.Name, stepName, stepCondition, ex.Message)); + } + } + + return null; + } + + private static void GenerateDefaultTaskNames( + ISet knownNames, + IDictionary> unnamedTasks) + { + foreach (var unnamedTasksByName in unnamedTasks) + { + if 
(unnamedTasksByName.Value.Count == 1 && knownNames.Add(unnamedTasksByName.Key)) + { + unnamedTasksByName.Value[0].Name = unnamedTasksByName.Key; + } + else + { + Int32 taskCounter = 1; + foreach (var unnamedTask in unnamedTasksByName.Value) + { + var candidateName = $"{unnamedTasksByName.Key}{taskCounter}"; + while (!knownNames.Add(candidateName)) + { + taskCounter++; + candidateName = $"{unnamedTasksByName.Key}{taskCounter}"; + } + + taskCounter++; + unnamedTask.Name = candidateName; + } + } + } + } + + private static IList ResolveInputs( + PipelineBuildContext context, + PhaseNode phase, + PipelineResources referencedResources, + PipelineResources unauthorizedResources, + TaskStep step, + TaskDefinition taskDefinition) + { + IList errors = new List(); + foreach (var input in taskDefinition.Inputs) + { + // Resolve by alias + var inputAlias = ResolveAlias(context, step, input); + + // If the input isn't set then there is nothing else to do here + if (!step.Inputs.TryGetValue(input.Name, out String inputValue)) + { + continue; + } + + // If the caller requested input validation and the input provides a validation section then we + // should do a best-effort validation based on what is available in the environment. + errors.AddRange(ValidateInput(context, phase, step, input, inputAlias, inputValue)); + + // Now resolve any resources referenced by task inputs + errors.AddRange(ResolveResources(context, phase, context.BuildOptions, referencedResources, unauthorizedResources, step, input, inputAlias, inputValue, throwOnFailure: false)); + } + + return errors; + } + + private static String ResolveAlias( + PipelineBuildContext context, + TaskStep step, + TaskInputDefinition input) + { + var specifiedName = input.Name; + if (context.BuildOptions.ResolveTaskInputAliases && !step.Inputs.ContainsKey(input.Name)) + { + foreach (String alias in input.Aliases) + { + if (step.Inputs.TryGetValue(alias, out String aliasValue)) + { + specifiedName = alias; + step.Inputs.Remove(alias); + step.Inputs.Add(input.Name, aliasValue); + break; + } + } + } + return specifiedName; + } + + private static IEnumerable ResolveResources( + IPipelineContext context, + PhaseNode phase, + BuildOptions options, + PipelineResources referencedResources, + PipelineResources unauthorizedResources, + TaskStep step, + TaskInputDefinition input, + String inputAlias, + String inputValue, + Boolean throwOnFailure = false) + { + if (String.IsNullOrEmpty(inputValue)) + { + return Enumerable.Empty(); + } + + var errors = new List(); + if (input.InputType.StartsWith(c_endpointInputTypePrefix, StringComparison.OrdinalIgnoreCase)) + { + var resolvedEndpoints = new List(); + var endpointType = input.InputType.Remove(0, c_endpointInputTypePrefix.Length); + var resolvedInputValues = inputValue.Split(',').Select(x => x.Trim()).Where(x => !String.IsNullOrEmpty(x)); + foreach (var value in resolvedInputValues) + { + var replacedValue = context.ExpandVariables(value); + referencedResources.AddEndpointReference(replacedValue); + + // Validate the resource using the provided store if desired + if (options.ValidateResources) + { + var endpoint = context.ResourceStore.GetEndpoint(replacedValue); + if (endpoint == null) + { + if (throwOnFailure) + { + throw new ResourceNotFoundException(PipelineStrings.ServiceEndpointNotFoundForInput(phase.Name, step.Name, inputAlias, replacedValue)); + } + else + { + resolvedEndpoints.Add(replacedValue); + unauthorizedResources?.AddEndpointReference(replacedValue); + errors.Add(new 
PipelineValidationError(PipelineStrings.ServiceEndpointNotFoundForInput(phase.Name, step.Name, inputAlias, replacedValue))); + } + } + else + { + if (!String.IsNullOrEmpty(endpointType)) + { + var endpointTypeSegments = endpointType.Split(new[] { ':' }, StringSplitOptions.RemoveEmptyEntries).Select(x => x.Trim()).ToList(); + if (endpointTypeSegments.Count >= 1) + { + var endpointTypeName = endpointTypeSegments[0]; + if (!endpointTypeName.Equals(endpoint.Type, StringComparison.OrdinalIgnoreCase)) + { + if (throwOnFailure) + { + throw new PipelineValidationException(PipelineStrings.StepInputEndpointTypeMismatch(phase.Name, step.Name, inputAlias, endpointTypeName, endpoint.Name, endpoint.Type)); + } + else + { + errors.Add(new PipelineValidationError(PipelineStrings.StepInputEndpointTypeMismatch(phase.Name, step.Name, inputAlias, endpointTypeName, endpoint.Name, endpoint.Type))); + } + } + else if (endpointTypeSegments.Count > 1 && !String.IsNullOrEmpty(endpoint.Authorization?.Scheme)) + { + var supportedAuthSchemes = endpointTypeSegments[1]?.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries).Select(x => x.Trim()).ToList(); + if (supportedAuthSchemes?.Count > 0 && !supportedAuthSchemes.Any(x => x.Equals(endpoint.Authorization.Scheme, StringComparison.OrdinalIgnoreCase))) + { + if (throwOnFailure) + { + throw new PipelineValidationException(PipelineStrings.StepInputEndpointAuthSchemeMismatch(phase.Name, step.Name, inputAlias, endpointTypeName, endpointTypeSegments[1], endpoint.Name, endpoint.Type, endpoint.Authorization.Scheme)); + } + else + { + errors.Add(new PipelineValidationError(PipelineStrings.StepInputEndpointAuthSchemeMismatch(phase.Name, step.Name, inputAlias, endpointTypeName, endpointTypeSegments[1], endpoint.Name, endpoint.Type, endpoint.Authorization.Scheme))); + } + } + } + } + } + + resolvedEndpoints.Add(endpoint.Id.ToString("D")); + } + } + else + { + // Always add the value back so we can update the input below. + resolvedEndpoints.Add(replacedValue); + } + } + + step.Inputs[input.Name] = String.Join(",", resolvedEndpoints); + } + else if (input.InputType.Equals(c_secureFileInputType, StringComparison.OrdinalIgnoreCase)) + { + var resolvedFiles = new List(); + var resolvedInputValues = inputValue.Split(',').Select(x => x.Trim()).Where(x => !String.IsNullOrEmpty(x)); + foreach (var value in resolvedInputValues) + { + var replacedValue = context.ExpandVariables(value); + referencedResources.AddSecureFileReference(replacedValue); + + // Validate the resource using the provided store if desired + if (options.ValidateResources) + { + var secureFile = context.ResourceStore.GetFile(replacedValue); + if (secureFile == null) + { + if (throwOnFailure) + { + throw new ResourceNotFoundException(PipelineStrings.SecureFileNotFoundForInput(phase.Name, step.Name, inputAlias, replacedValue)); + } + else + { + resolvedFiles.Add(replacedValue); + unauthorizedResources?.AddSecureFileReference(replacedValue); + errors.Add(new PipelineValidationError(PipelineStrings.SecureFileNotFoundForInput(phase.Name, step.Name, inputAlias, replacedValue))); + } + } + else + { + resolvedFiles.Add(secureFile.Id.ToString("D")); + } + } + else + { + // Always add the value back so we can update the input below. 
+ resolvedFiles.Add(replacedValue); + } + } + + step.Inputs[input.Name] = String.Join(",", resolvedFiles); + } + else if (input.InputType.Equals(TaskInputType.Repository, StringComparison.OrdinalIgnoreCase)) + { + // Ignore repository alias None + if (!String.Equals(inputValue, PipelineConstants.NoneAlias)) + { + var repository = context.ResourceStore.Repositories.Get(inputValue); + if (repository == null) + { + if (options.ValidateResources) + { + // repository should always be there as full object + if (throwOnFailure) + { + throw new ResourceNotFoundException(PipelineStrings.RepositoryResourceNotFound(inputValue)); + } + else + { + errors.Add(new PipelineValidationError(PipelineStrings.RepositoryResourceNotFound(inputValue))); + } + } + } + else + { + referencedResources.Repositories.Add(repository); + + // Add the endpoint + if (repository.Endpoint != null) + { + referencedResources.AddEndpointReference(repository.Endpoint); + + if (options.ValidateResources) + { + var repositoryEndpoint = context.ResourceStore.GetEndpoint(repository.Endpoint); + if (repositoryEndpoint == null) + { + if (throwOnFailure) + { + throw new ResourceNotFoundException(PipelineStrings.ServiceEndpointNotFound(repository.Endpoint)); + } + else + { + unauthorizedResources?.AddEndpointReference(repository.Endpoint); + errors.Add(new PipelineValidationError(PipelineStrings.ServiceEndpointNotFound(repository.Endpoint))); + } + } + else + { + repository.Endpoint = new ServiceEndpointReference() { Id = repositoryEndpoint.Id }; + } + } + } + } + } + else + { + // always add self repo with checkout: none + var selfRepository = context.ResourceStore.Repositories.Get(PipelineConstants.SelfAlias); + if (selfRepository != null) + { + referencedResources.Repositories.Add(selfRepository); + if (selfRepository.Endpoint != null) + { + referencedResources.AddEndpointReference(selfRepository.Endpoint); + if (options.ValidateResources) + { + var repositoryEndpoint = context.ResourceStore.GetEndpoint(selfRepository.Endpoint); + if (repositoryEndpoint == null) + { + if (throwOnFailure) + { + throw new ResourceNotFoundException(PipelineStrings.ServiceEndpointNotFound(selfRepository.Endpoint)); + } + else + { + unauthorizedResources?.AddEndpointReference(selfRepository.Endpoint); + errors.Add(new PipelineValidationError(PipelineStrings.ServiceEndpointNotFound(selfRepository.Endpoint))); + } + } + else + { + selfRepository.Endpoint = new ServiceEndpointReference() { Id = repositoryEndpoint.Id }; + } + } + } + } + } + } + + return errors; + } + + private String ResolveContainerResource(JobExecutionContext context, String inputAlias) + { + var outputAlias = inputAlias; + // Check if container is an image spec, not an alias + if (inputAlias.Contains(":")) + { + var resource = context.ResourceStore?.Containers.GetAll().FirstOrDefault(x => + x.Endpoint == null && + x.Properties.Count == 1 && + String.Equals(x.Image, inputAlias, StringComparison.Ordinal)); + if (resource == null) + { + resource = new ContainerResource + { + Alias = Guid.NewGuid().ToString("N"), + Image = inputAlias, + }; + context.ResourceStore?.Containers.Add(resource); + } + outputAlias = resource.Alias; + } + + return outputAlias; + } + + private void UpdateJobContextReferencedContainers(JobExecutionContext context, string containerAlias) + { + // Look up the container by alias, and add dereferenced container to ReferencedResources + var containerResource = context.ResourceStore?.Containers.Get(containerAlias); + if (containerResource == null) + { + throw new 
ResourceNotFoundException(PipelineStrings.ContainerResourceNotFound(containerAlias)); + } + context.ReferencedResources.Containers.Add(containerResource); + if (containerResource.Endpoint != null) + { + context.ReferencedResources.AddEndpointReference(containerResource.Endpoint); + var serviceEndpoint = context.ResourceStore?.GetEndpoint(containerResource.Endpoint); + if (serviceEndpoint == null) + { + throw new ResourceNotFoundException(PipelineStrings.ContainerEndpointNotFound(containerAlias, containerResource.Endpoint)); + } + } + } + + private static IEnumerable ValidateInput( + PipelineBuildContext context, + PhaseNode phase, + TaskStep step, + TaskInputDefinition input, + String inputAlias, + String value) + { + if (!context.BuildOptions.ValidateTaskInputs || input.Validation == null) + { + return Enumerable.Empty(); + } + + // We cannot perform useful validation if the value didn't expand, it may not be populated until it + // executes on the target. If we still have variables we just let it go through optimistically. + var expandedInputValue = context.ExpandVariables(value); + if (VariableUtility.IsVariable(expandedInputValue)) + { + return Enumerable.Empty(); + } + + var inputContext = new InputValidationContext + { + Evaluate = true, + EvaluationOptions = new DistributedTask.Expressions.EvaluationOptions(), + Expression = input.Validation.Expression, + SecretMasker = context.SecretMasker, + TraceWriter = null, + Value = expandedInputValue, + }; + + // Make sure to track any input validation errors encountered + var validationResult = context.InputValidator.Validate(inputContext); + if (validationResult.IsValid) + { + return Enumerable.Empty(); + } + else + { + // Make sure we do not expose secrets when logging errors about expanded input values + var maskedValue = context.SecretMasker.MaskSecrets(expandedInputValue); + var reason = validationResult.Reason ?? input.Validation.Message; + return new[] { new PipelineValidationError(PipelineStrings.StepTaskInputInvalid(phase.Name, step.Name, inputAlias, maskedValue, inputContext.Expression, reason)) }; + } + } + + /// + /// Produces the official display name for this job. + /// Optionally include any of the path components you want to consider. + /// + internal static String GenerateDisplayName( + Stage stage = null, + PhaseNode phase = null, + Job job = null) + { + var stageName = default(string); + if (stage != null) + { + stageName = stage.DisplayName ?? stage.Name; + } + + var factoryName = default(string); + if (phase != null) + { + factoryName = phase.DisplayName ?? phase.Name; + } + + var jobName = default(string); + if (job != null) + { + jobName = job.DisplayName ?? job.Name; + } + + return GenerateDisplayName(stageName, factoryName, jobName); + } + + /// + /// Produces the official display name for this job. + /// Optionally include any of the path components you want to consider. + /// + internal static String GenerateDisplayName( + PhaseNode factory, + String configuration = null) + { + var factoryDisplayName = factory == null + ? String.Empty + : factory.DisplayName ?? factory.Name; + return GenerateDisplayName(factoryDisplayName, configuration); + } + + /// + /// Produces the official display name for this job. + /// Optionally include any of the path components you want to consider. 
+ /// Removes any occurrence of the default node name (the reference name used for default nodes, + /// or when users do not specify any name) + /// + internal static String GenerateDisplayName(params string[] tokens) + { + if (tokens == null) + { + return string.Empty; + } + + var defaultNodeName = PipelineConstants.DefaultJobName; + var l = defaultNodeName.Length; + var formattedTokens = tokens + .Where(x => !string.IsNullOrWhiteSpace(x)) + .Select(x => (x.StartsWith(defaultNodeName) ? x.Substring(l) : x).Trim()) + .Where(x => !string.IsNullOrWhiteSpace(x)); + var result = string.Join(" ", formattedTokens); + + return string.IsNullOrWhiteSpace(result) + ? PipelineConstants.DefaultJobDisplayName + : result; + } + + public Job CreateJob( + JobExecutionContext context, + ExpressionValue container, + IDictionary> sidecarContainers, + Boolean continueOnError, + Int32 timeoutInMinutes, + Int32 cancelTimeoutInMinutes, + String displayName = null) + { + // default display name is based on the phase. + if (string.IsNullOrWhiteSpace(displayName)) + { + displayName = Phase.GenerateDisplayName(context.Phase.Definition); + } + + var job = new Job + { + Id = context.GetInstanceId(), + Name = context.Job.Name, + DisplayName = displayName, + ContinueOnError = continueOnError, + TimeoutInMinutes = timeoutInMinutes, + CancelTimeoutInMinutes = cancelTimeoutInMinutes + }; + + if (context.ExecutionOptions.EnableResourceExpressions) + { + job.Target = GenerateJobSpecificTarget(context); + } + + if (job.Target == null) + { + ArgumentUtility.CheckForNull(this.Target, nameof(this.Target)); + job.Target = this.Target.Clone(); + } + + if (context.EnvironmentVersion > 1) + { + // always add self or designer repo to repository list + RepositoryResource defaultRepo = null; + var selfRepo = context.ResourceStore?.Repositories.Get(PipelineConstants.SelfAlias); + if (selfRepo == null) + { + var designerRepo = context.ResourceStore?.Repositories.Get(PipelineConstants.DesignerRepo); + if (designerRepo != null) + { + defaultRepo = designerRepo; + } + else + { + Debug.Fail("Repositories are not filled in."); + } + } + else + { + defaultRepo = selfRepo; + } + + if (defaultRepo != null) + { + context.ReferencedResources.Repositories.Add(defaultRepo); + + // Add the endpoint + if (defaultRepo.Endpoint != null) + { + context.ReferencedResources.AddEndpointReference(defaultRepo.Endpoint); + var repositoryEndpoint = context.ResourceStore?.GetEndpoint(defaultRepo.Endpoint); + if (repositoryEndpoint == null) + { + throw new ResourceNotFoundException(PipelineStrings.ServiceEndpointNotFound(defaultRepo.Endpoint)); + } + } + } + } + + // Expand short-syntax inline-containers, resolve resource references and add to new job and context + if (container != null) + { + var containerAlias = container.GetValue(context).Value; + var outputAlias = ResolveContainerResource(context, containerAlias); + job.Container = new DistributedTask.ObjectTemplating.Tokens.StringToken(null, null, null, outputAlias); + UpdateJobContextReferencedContainers(context, outputAlias); + } + if (sidecarContainers != null) + { + foreach (var sidecar in sidecarContainers) + { + var sidecarContainerAlias = sidecar.Value.GetValue(context).Value; + var outputAlias = ResolveContainerResource(context, sidecarContainerAlias); + job.SidecarContainers.Add(sidecar.Key, outputAlias); + UpdateJobContextReferencedContainers(context, outputAlias); + } + } + + // Update the execution context with the job-specific system variables + UpdateJobContextVariablesFromJob(context, job); + + 
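// Clone each authored step into its job-specific form; task, group and action steps are each resolved by their own helper below. +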
var steps = new List(); + var identifier = context.GetInstanceName(); + foreach (var step in this.Steps) + { + if (step.Type == StepType.Task) + { + // We don't need to add to demands here since they are already part of the plan. + steps.Add(CreateJobTaskStep(context, this, identifier, step as TaskStep)); + } + else if (step.Type == StepType.Group) + { + steps.Add(CreateJobStepGroup(context, this, identifier, step as GroupStep)); + } + else if (step.Type == StepType.Action) + { + steps.Add(CreateJobActionStep(context, identifier, step as ActionStep)); + } + else + { + // Should never happen. + Debug.Fail(step.Type.ToString()); + } + } + + // TODO: remove the whole concept of step providers. + // this work should happen during compilation. -zacox. + // + // This is not ideal but we need to set the job on the context before calling the step providers below. + // This method returns the job and it gets set after. We need to clean this up when we do refactoring. + context.Job.Definition = job; + + var stepProviderDemands = new HashSet(); + + // Add the system-injected tasks before inserting the user tasks. This currently does not handle injecting + // min agent version demands if appropriate. + var systemSteps = new List(); + if (context.StepProviders != null) + { + var jobSteps = new ReadOnlyCollection(steps); + + foreach (IStepProvider stepProvider in context.StepProviders) + { + systemSteps.AddRange(stepProvider.GetPreSteps(context, jobSteps)); + } + } + + if (systemSteps?.Count > 0) + { + for (Int32 i = 0; i < systemSteps.Count; i++) + { + systemSteps[i].Name = $"__system_{i + 1}"; + + IList resolvedSteps = new List(); + if (ResolveTaskStep(context, this, identifier, systemSteps[i], out resolvedSteps, stepProviderDemands)) + { + job.Steps.AddRange(resolvedSteps); + } + else + { + job.Steps.Add(CreateJobTaskStep(context, this, identifier, systemSteps[i], stepProviderDemands)); + } + } + } + + // Resolving user steps + foreach (var step in steps) + { + IList resolvedSteps = new List(); + if (ResolveTaskStep(context, this, identifier, step, out resolvedSteps)) + { + job.Steps.AddRange(resolvedSteps); + } + else + { + job.Steps.Add(step); + } + } + + // Add post job steps, if there are any. + // These are added after the user tasks. + var postJobSteps = new List(); + if (context.StepProviders != null) + { + var jobSteps = new ReadOnlyCollection(job.Steps); + + foreach (IStepProvider stepProvider in context.StepProviders) + { + postJobSteps.AddRange(stepProvider.GetPostSteps(context, jobSteps)); + } + } + + if (postJobSteps?.Count > 0) + { + for (Int32 i = 0; i < postJobSteps.Count; i++) + { + postJobSteps[i].Name = $"__system_post_{i + 1}"; + job.Steps.Add(CreateJobTaskStep(context, this, identifier, postJobSteps[i], stepProviderDemands)); + } + } + + // Add post checkout tasks + // We need to do this after all other tasks are added since checkout could be injected from pre steps or phase steps. 
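+ // Each key in the dictionary identifies a task definition; its associated steps are inserted immediately after the last occurrence of that task in the job.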
+ var postCheckoutSteps = new Dictionary>(); + if (context.StepProviders != null) + { + var jobSteps = new ReadOnlyCollection(steps); + + foreach (IStepProvider stepProvider in context.StepProviders) + { + // This will flatten into 1 dictionary of guids to insert after and the tasks to insert + Dictionary> toAdd = stepProvider.GetPostTaskSteps(context, jobSteps); + + foreach (var key in toAdd.Keys) + { + if (!postCheckoutSteps.ContainsKey(key)) + { + postCheckoutSteps[key] = new List(); + } + + postCheckoutSteps[key].AddRange(toAdd[key]); + } + } + } + + if (postCheckoutSteps?.Keys.Count > 0) + { + foreach (var pair in postCheckoutSteps) + { + Int32? indexOfLastInstanceOfTask = null; + for (int i = job.Steps.Count - 1; i >= 0; i--) + { + var taskStep = job.Steps[i] as TaskStep; + + if (taskStep != null && taskStep.Reference.Id.Equals(pair.Key)) + { + indexOfLastInstanceOfTask = i; + break; + } + } + + // This will not insert after if the task doesn't exist. + if (indexOfLastInstanceOfTask.HasValue) + { + for (Int32 i = 0; i < pair.Value.Count; i++) + { + // This is so that we know this was a system injected task. + pair.Value[i].Name = $"__system_postcheckout_{i + 1}"; + + job.Steps.Insert(indexOfLastInstanceOfTask.Value + i + 1, CreateJobTaskStep(context, this, identifier, pair.Value[i], stepProviderDemands)); + } + } + } + } + + // create unique set of job demands + AddDemands(context, job, stepProviderDemands); + AddDemands(context, job, this.Target?.Demands); + + // Copy context variables into job, since job will be saved and read back later before agent job message is sent + foreach (var variable in context.Variables) + { + context.Job.Definition.Variables.Add(new Variable + { + Name = variable.Key, + Value = variable.Value.IsSecret ? null : variable.Value.Value, + Secret = variable.Value.IsSecret + }); + } + + return job; + } + + private void AddDemands( + JobExecutionContext context, + Job job, + ISet demands) + { + if (context == null || job == null || demands == null) + { + return; + } + + var mergedDemands = job.Demands; + foreach (var d in demands) + { + if (d == null) + { + continue; + } + + var demandValue = d.Value; + if (String.IsNullOrEmpty(demandValue)) + { + // if a demand has no value, add it + mergedDemands.Add(d.Clone()); + } + else + { + // if a demand has a non-empty value, any problems encountered while evaluating + // macros should be promoted into PipelineValidationExceptions + var expandedValue = context.ExpandVariables(demandValue, maskSecrets: true); + try + { + var resolvedDemand = d.Clone(); + resolvedDemand.Update(expandedValue); + mergedDemands.Add(resolvedDemand); + } + catch (Exception e) + { + throw new PipelineValidationException(PipelineStrings.DemandExpansionInvalid(d.ToString(), d.Value, expandedValue), e); + } + } + } + } + + private Boolean ResolveTaskStep( + JobExecutionContext context, + PhaseNode phase, + String identifier, + JobStep step, + out IList resolvedSteps, + HashSet resolvedDemands = null) + { + Boolean handled = false; + IList resultSteps = new List(); + resolvedSteps = new List(); + + if (context.ResourceStore?.ResolveStep(context, step, out resultSteps) ?? false) + { + foreach (TaskStep resultStep in resultSteps) + { + resolvedSteps.Add(CreateJobTaskStep(context, phase, identifier, resultStep, resolvedDemands)); + } + + handled = true; + } + + return handled; + } + + /// + /// Evaluate runtime expressions + /// Queue targets are allowed use runtime expressions. 
+ /// Resolve all expressions and produce a literal QueueTarget for execution. + /// + private AgentQueueTarget GenerateJobSpecificTarget(JobExecutionContext context) + { + var phase = context?.Phase?.Definition as Phase; + if (phase == null) + { + return null; + } + + if (!(phase.Target is AgentQueueTarget agentQueueTarget)) + { + return null; + } + + if (agentQueueTarget.IsLiteral()) + { + return null; + } + + // create a clone containing only literals and validate referenced resources + var validationResult = new Validation.ValidationResult(); + var literalTarget = agentQueueTarget.Evaluate(context, validationResult); + literalTarget?.Validate( + context: context, + result: validationResult, + buildOptions: new BuildOptions + { + EnableResourceExpressions = true, + ValidateResources = true, + ValidateExpressions = true, // all expressions must resolve + AllowEmptyQueueTarget = false + }, + steps: new List(), + taskDemands: new HashSet()); + + if (validationResult.Errors.Count > 0) + { + throw new PipelineValidationException(validationResult.Errors); + } + + return literalTarget; + } + + // todo: merge JobFactory.cs and Phase.cs and then make this private + internal static ActionStep CreateJobActionStep( + JobExecutionContext context, + String jobIdentifier, + ActionStep action) + { + var jobStep = (ActionStep)action.Clone(); + + // Setup the identifier based on our current context + var actionIdentifier = context.IdGenerator.GetInstanceName(jobIdentifier, action.Name); + jobStep.Id = context.IdGenerator.GetInstanceId(actionIdentifier); + + // Update the display name of task steps + jobStep.DisplayName = jobStep.DisplayName; + + return jobStep; + } + + // todo: merge JobFactory.cs and Phase.cs and then make this private + internal static TaskStep CreateJobTaskStep( + JobExecutionContext context, + PhaseNode phase, + String jobIdentifier, + TaskStep task, + HashSet resolvedDemands = null) + { + var jobStep = (TaskStep)task.Clone(); + + // Setup the identifier based on our current context + var taskIdentifier = context.IdGenerator.GetInstanceName(jobIdentifier, task.Name); + jobStep.Id = context.IdGenerator.GetInstanceId(taskIdentifier); + + // Update the display name of task steps + jobStep.DisplayName = context.ExpandVariables(jobStep.DisplayName, maskSecrets: true); + + // Now resolve any resources referenced by inputs + var taskDefinition = context.TaskStore.ResolveTask(jobStep.Reference.Id, jobStep.Reference.Version); + if (taskDefinition != null) + { + foreach (var input in taskDefinition.Inputs) + { + if (task.Inputs.TryGetValue(input.Name, out String value)) + { + ResolveResources(context, phase, BuildOptions.None, context.ReferencedResources, null, jobStep, input, input.Name, value, throwOnFailure: true); + } + } + + // Add demands + if (resolvedDemands != null) + { + resolvedDemands.AddRange(taskDefinition.Demands); + + if (!String.IsNullOrEmpty(taskDefinition.MinimumAgentVersion)) + { + resolvedDemands.Add(new DemandMinimumVersion(PipelineConstants.AgentVersionDemandName, taskDefinition.MinimumAgentVersion)); + } + } + } + else + { + throw new TaskDefinitionNotFoundException(PipelineStrings.TaskMissing(phase.Name, jobStep.Name, jobStep.Reference.Id, jobStep.Reference.Version)); + } + + return jobStep; + } + + // todo: merge JobFactory.cs and Phase.cs and then make this private + internal static GroupStep CreateJobStepGroup( + JobExecutionContext context, + PhaseNode phase, + String jobIdentifier, + GroupStep group) + { + var groupStep = (GroupStep)group.Clone(); + + var 
groupIdentifier = context.IdGenerator.GetInstanceName(jobIdentifier, group.Name); + groupStep.Id = context.IdGenerator.GetInstanceId(groupIdentifier); + + // Update the display name of step group + groupStep.DisplayName = context.ExpandVariables(groupStep.DisplayName, maskSecrets: true); + + // Now resolve every task steps within step group + var stepsCopy = new List(); + foreach (var task in groupStep.Steps) + { + stepsCopy.Add(CreateJobTaskStep(context, phase, groupIdentifier, task)); + } + + groupStep.Steps.Clear(); + groupStep.Steps.AddRange(stepsCopy); + + return groupStep; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_steps?.Count == 0) + { + m_steps = null; + } + } + + [DataMember(Name = "Steps", EmitDefaultValue = false)] + private IList m_steps; + + private const String c_secureFileInputType = "secureFile"; + private const String c_endpointInputTypePrefix = "connectedService:"; + + // todo: merge JobFactory.cs and Phase.cs and then make this private + internal class StepValidationResult + { + public String MinAgentVersion { get; set; } + + public HashSet TaskDemands + { + get + { + if (m_taskDemands == null) + { + m_taskDemands = new HashSet(); + } + + return m_taskDemands; + } + } + + public HashSet KnownNames + { + get + { + if (m_knownNames == null) + { + m_knownNames = new HashSet(StringComparer.OrdinalIgnoreCase); + } + + return m_knownNames; + } + } + + public HashSet TasksSatisfy + { + get + { + if (m_tasksSatisfy == null) + { + m_tasksSatisfy = new HashSet(StringComparer.OrdinalIgnoreCase); + } + + return m_tasksSatisfy; + } + } + + public Dictionary> UnnamedSteps + { + get + { + if (m_unnamedSteps == null) + { + m_unnamedSteps = new Dictionary>(StringComparer.OrdinalIgnoreCase); + } + + return m_unnamedSteps; + } + } + + HashSet m_taskDemands; + HashSet m_knownNames; + HashSet m_tasksSatisfy; + Dictionary> m_unnamedSteps; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PhaseCondition.cs b/src/Sdk/DTPipelines/Pipelines/PhaseCondition.cs new file mode 100644 index 00000000000..eae3ddb45fa --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PhaseCondition.cs @@ -0,0 +1,22 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Pipelines.Runtime; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class PhaseCondition : GraphCondition + { + public PhaseCondition(String condition) + : base(condition) + { + } + + public ConditionResult Evaluate(PhaseExecutionContext context) + { + var traceWriter = new ConditionTraceWriter(); + var evaluationResult = m_parsedCondition.Evaluate(traceWriter, context.SecretMasker, context, context.ExpressionOptions); + return new ConditionResult() { Value = evaluationResult.IsTruthy, Trace = traceWriter.Trace }; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PhaseDependency.cs b/src/Sdk/DTPipelines/Pipelines/PhaseDependency.cs new file mode 100644 index 00000000000..3cccfaeacb4 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PhaseDependency.cs @@ -0,0 +1,56 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class PhaseDependency + { + [JsonConstructor] + public PhaseDependency() + { + } + + private PhaseDependency(PhaseDependency dependencyToCopy) + { + this.Scope = dependencyToCopy.Scope; + this.Event = 
dependencyToCopy.Event; + } + + [DataMember] + public String Scope + { + get; + set; + } + + [DataMember] + public String Event + { + get; + set; + } + + /// + /// Implicitly converts a Phase to a PhaseDependency to enable easier modeling of graphs. + /// + /// The phase which should be converted to a dependency + public static implicit operator PhaseDependency(Phase dependency) + { + return PhaseCompleted(dependency.Name); + } + + public static PhaseDependency PhaseCompleted(String name) + { + return new PhaseDependency { Scope = name, Event = "Completed" }; + } + + internal PhaseDependency Clone() + { + return new PhaseDependency(this); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PhaseNode.cs b/src/Sdk/DTPipelines/Pipelines/PhaseNode.cs new file mode 100644 index 00000000000..70c9fa3f572 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PhaseNode.cs @@ -0,0 +1,421 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Reflection; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.Pipelines.Validation; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public enum PhaseType + { + [EnumMember] + Phase, + + [EnumMember] + Provider, + + [EnumMember] + JobFactory, + } + + [DataContract] + [KnownType(typeof(Phase))] + [KnownType(typeof(ProviderPhase))] + [KnownType(typeof(JobFactory))] + [JsonConverter(typeof(PhaseNodeJsonConverter))] + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class PhaseNode : IGraphNode + { + protected PhaseNode() + { + } + + protected PhaseNode(PhaseNode nodeToCopy) + { + this.Name = nodeToCopy.Name; + this.DisplayName = nodeToCopy.DisplayName; + this.Condition = nodeToCopy.Condition; + this.ContinueOnError = nodeToCopy.ContinueOnError; + this.Target = nodeToCopy.Target?.Clone(); + + if (nodeToCopy.m_dependsOn?.Count > 0) + { + m_dependsOn = new HashSet(nodeToCopy.m_dependsOn, StringComparer.OrdinalIgnoreCase); + } + + if (nodeToCopy.m_variables != null && nodeToCopy.m_variables.Count > 0) + { + m_variables = new List(nodeToCopy.m_variables); + } + } + + /// + /// Gets the type of this phase. + /// + [DataMember(EmitDefaultValue = false)] + public abstract PhaseType Type { get; } + + /// + /// Gets or sets the name for this phase. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Gets or sets the display name for this phase. + /// + [DataMember(EmitDefaultValue = false)] + public String DisplayName + { + get; + set; + } + + /// + /// Gets or sets a condition which is evaluated after all dependencies have been satisfied and determines + /// whether or not the jobs within this phase should be executed or skipped. + /// + [DataMember(EmitDefaultValue = false)] + public String Condition + { + get; + set; + } + + /// + /// Gets or sets a value indicating whether or not failed jobs are considered a termination condition for + /// the phase. + /// + [DataMember(EmitDefaultValue = false)] + [JsonConverter(typeof(ExpressionValueJsonConverter))] + public ExpressionValue ContinueOnError + { + get; + set; + } + + /// + /// Gets the set of phases which must complete before this phase begins execution. 
+ /// + public ISet DependsOn + { + get + { + if (m_dependsOn == null) + { + m_dependsOn = new HashSet(StringComparer.OrdinalIgnoreCase); + } + return m_dependsOn; + } + } + + /// + /// Gets or sets the target for this phase. + /// + [DataMember(EmitDefaultValue = false)] + public PhaseTarget Target + { + get; + set; + } + + /// + /// Gets the set of variables which will be provided to the phase steps. + /// + public IList Variables + { + get + { + if (m_variables == null) + { + m_variables = new List(); + } + return m_variables; + } + } + + /// + /// Resolves external references and ensures the steps are compatible with the selected target. + /// + /// The validation context + public virtual void Validate( + PipelineBuildContext context, + ValidationResult result) + { + // Ensure we have a target + if (this.Target == null) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.PhaseTargetRequired(this.Name))); + return; + } + + if (this.Target.Type != PhaseTargetType.Queue && this.Target.Type != PhaseTargetType.Server && this.Target.Type != PhaseTargetType.Pool) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.UnsupportedTargetType(this.Name, this.Target.Type))); + return; + } + + // Default the condition to something reasonable if none is specified + if (String.IsNullOrEmpty(this.Condition)) + { + this.Condition = PhaseCondition.Default; + } + else + { + // Simply construct the condition and make sure the syntax and functions used are valid + var parsedCondition = new PhaseCondition(this.Condition); + } + + if (m_variables?.Count > 0) + { + var variablesCopy = new List(); + foreach (var variable in this.Variables) + { + if (variable is Variable simpleVariable) + { + // Do not allow phase overrides for certain variables. 
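+ // The non-overridable set (s_nonOverridableVariables below) currently covers the access token scope and the job parallelism tag.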
+ if (s_nonOverridableVariables.Contains(simpleVariable.Name)) + { + continue; + } + } + else if (variable is VariableGroupReference groupVariable) + { + if (context.EnvironmentVersion < 2) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.PhaseVariableGroupNotSupported(this.Name, groupVariable))); + continue; + } + + result.ReferencedResources.VariableGroups.Add(groupVariable); + + if (context.BuildOptions.ValidateResources) + { + var variableGroup = context.ResourceStore.VariableGroups.Get(groupVariable); + if (variableGroup == null) + { + result.UnauthorizedResources.VariableGroups.Add(groupVariable); + result.Errors.Add(new PipelineValidationError(PipelineStrings.VariableGroupNotFoundForPhase(this.Name, groupVariable))); + } + } + } + + variablesCopy.Add(variable); + } + + m_variables.Clear(); + m_variables.AddRange(variablesCopy); + } + } + + protected virtual void UpdateJobContextVariablesFromJob(JobExecutionContext jobContext, Job job) + { + jobContext.Variables[WellKnownDistributedTaskVariables.JobDisplayName] = job.DisplayName; + jobContext.Variables[WellKnownDistributedTaskVariables.JobId] = job.Id.ToString("D"); + jobContext.Variables[WellKnownDistributedTaskVariables.JobName] = job.Name; + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + if (m_dependencies?.Count > 0) + { + m_dependsOn = new HashSet(m_dependencies.Select(x => x.Scope), StringComparer.OrdinalIgnoreCase); + m_dependencies = null; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_dependsOn?.Count == 0) + { + m_dependsOn = null; + } + + if (m_variables?.Count == 0) + { + m_variables = null; + } + } + + internal readonly static HashSet s_nonOverridableVariables = new HashSet(new[] + { + WellKnownDistributedTaskVariables.AccessTokenScope, + WellKnownDistributedTaskVariables.JobParallelismTag + }, StringComparer.OrdinalIgnoreCase); + + [JsonConverter(typeof(PhaseVariablesJsonConverter))] + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private List m_variables; + + [DataMember(Name = "Dependencies", EmitDefaultValue = false)] + private List m_dependencies; + + [DataMember(Name = "DependsOn", EmitDefaultValue = false)] + private HashSet m_dependsOn; + + private class PhaseVariablesJsonConverter : JsonConverter + { + public PhaseVariablesJsonConverter() + { + } + + public override Boolean CanConvert(Type objectType) + { + return true; + } + + public override Boolean CanWrite => true; + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType == JsonToken.StartArray) + { + return serializer.Deserialize>(reader); + } + else if (reader.TokenType == JsonToken.StartObject) + { + var dictionary = serializer.Deserialize>(reader); + if (dictionary?.Count > 0) + { + return dictionary.Select(x => new Variable { Name = x.Key, Value = x.Value }).Cast().ToList(); + } + } + + return null; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + var variables = value as IList; + if (variables?.Count > 0) + { + if (variables.Any(x => x is VariableGroupReference)) + { + serializer.Serialize(writer, variables); + } + else + { + // This format is only here for back compat with the previous serialization format + var dictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var variable in variables.OfType()) + { + dictionary[variable.Name] = variable.Value; + } 
+ + serializer.Serialize(writer, dictionary); + } + } + } + } + } + + internal sealed class PhaseNodeJsonConverter : VssSecureJsonConverter + { + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Boolean CanConvert(Type objectType) + { + return typeof(PhaseNode).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + PhaseType? phaseType = null; + JObject value = JObject.Load(reader); + if (!value.TryGetValue("Type", StringComparison.OrdinalIgnoreCase, out JToken phaseTypeValue)) + { + phaseType = PhaseType.Phase; + } + else + { + if (phaseTypeValue.Type == JTokenType.Integer) + { + phaseType = (PhaseType)(Int32)phaseTypeValue; + } + else if (phaseTypeValue.Type == JTokenType.String) + { + PhaseType parsedType; + if (Enum.TryParse((String)phaseTypeValue, ignoreCase: true, result: out parsedType)) + { + phaseType = parsedType; + } + } + } + + if (phaseType == null) + { + return existingValue; + } + + Object newValue = null; + switch (phaseType) + { + case PhaseType.Phase: + newValue = new Phase(); + break; + + case PhaseType.Provider: + newValue = new ProviderPhase(); + break; + + case PhaseType.JobFactory: + newValue = new JobFactory(); + break; + } + + using (JsonReader objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, newValue); + } + + return newValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PhaseTarget.cs b/src/Sdk/DTPipelines/Pipelines/PhaseTarget.cs new file mode 100644 index 00000000000..37f424d7377 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PhaseTarget.cs @@ -0,0 +1,260 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Reflection; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.Pipelines.Validation; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [KnownType(typeof(AgentQueueTarget))] + [KnownType(typeof(AgentPoolTarget))] + [KnownType(typeof(ServerTarget))] + [KnownType(typeof(DeploymentGroupTarget))] + [JsonConverter(typeof(PhaseTargetJsonConverter))] + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class PhaseTarget + { + protected PhaseTarget(PhaseTargetType type) + { + this.Type = type; + } + + protected PhaseTarget(PhaseTarget targetToClone) + { + this.Type = targetToClone.Type; + this.ContinueOnError = targetToClone.ContinueOnError; + this.TimeoutInMinutes = targetToClone.TimeoutInMinutes; + this.CancelTimeoutInMinutes = targetToClone.CancelTimeoutInMinutes; + if (targetToClone.m_demands?.Count > 0) + { + m_demands = new HashSet(targetToClone.m_demands.Select(x => x.Clone())); + } + } + + /// + /// Get the type of target. + /// + [DataMember] + public PhaseTargetType Type + { + get; + private set; + } + + /// + /// Gets or sets a value which determines whether or not to treat errors as terminal. 
+ /// + [DataMember(EmitDefaultValue = false)] + [JsonConverter(typeof(ExpressionValueJsonConverter))] + public ExpressionValue ContinueOnError + { + get; + set; + } + + /// + /// Gets or sets a value which determines the maximum amount of time a job is allowed to execute. + /// + [DataMember(EmitDefaultValue = false)] + [JsonConverter(typeof(ExpressionValueJsonConverter))] + public ExpressionValue TimeoutInMinutes + { + get; + set; + } + + /// + /// Gets or sets a value which determines the maximum amount of time a job is allowed for cancellation. + /// + [DataMember(EmitDefaultValue = false)] + [JsonConverter(typeof(ExpressionValueJsonConverter))] + public ExpressionValue CancelTimeoutInMinutes + { + get; + set; + } + + /// + /// Gets the demands which determine where this job may be run. + /// + public ISet Demands + { + get + { + if (m_demands == null) + { + m_demands = new HashSet(); + } + return m_demands; + } + } + + /// + /// Creates a deep copy of the current instance. + /// + /// A new PhaseTarget instance of the current type + public abstract PhaseTarget Clone(); + + /// + /// Indicates whether the task definition can run on the target. + /// + public abstract Boolean IsValid(TaskDefinition task); + + internal abstract JobExecutionContext CreateJobContext(PhaseExecutionContext context, String jobName, Int32 attempt, Boolean continueOnError, Int32 timeoutInMinutes, Int32 cancelTimeoutInMinutes, IJobFactory jobFactory); + + internal void Validate( + IPipelineContext context, + BuildOptions buildOptions, + ValidationResult result) + { + this.Validate(context, buildOptions, result, new List(), new HashSet()); + } + + internal abstract ExpandPhaseResult Expand(PhaseExecutionContext context, Boolean continueOnError, Int32 timeoutInMinutes, Int32 cancelTimeoutInMinutes, IJobFactory jobFactory, JobExpansionOptions options); + + internal virtual void Validate( + IPipelineContext context, + BuildOptions buildOptions, + ValidationResult result, + IList steps, + ISet taskDemands) + { + } + + internal JobExecutionContext CreateJobContext( + PhaseExecutionContext context, + String jobName, + Int32 attempt, + IJobFactory jobFactory) + { + var continueOnError = context.Evaluate(nameof(ContinueOnError), this.ContinueOnError, false).Value; + var timeoutInMinutes = context.Evaluate(nameof(TimeoutInMinutes), this.TimeoutInMinutes, PipelineConstants.DefaultJobTimeoutInMinutes).Value; + var cancelTimeoutInMinutes = context.Evaluate(nameof(CancelTimeoutInMinutes), this.CancelTimeoutInMinutes, PipelineConstants.DefaultJobCancelTimeoutInMinutes).Value; + return this.CreateJobContext(context, jobName, attempt, continueOnError, timeoutInMinutes, cancelTimeoutInMinutes, jobFactory); + } + + internal ExpandPhaseResult Expand( + PhaseExecutionContext context, + IJobFactory jobFactory, + JobExpansionOptions options) + { + var continueOnError = context.Evaluate(nameof(ContinueOnError), this.ContinueOnError, false).Value; + var timeoutInMinutes = context.Evaluate(nameof(TimeoutInMinutes), this.TimeoutInMinutes, PipelineConstants.DefaultJobTimeoutInMinutes).Value; + var cancelTimeoutInMinutes = context.Evaluate(nameof(CancelTimeoutInMinutes), this.CancelTimeoutInMinutes, PipelineConstants.DefaultJobCancelTimeoutInMinutes).Value; + return this.Expand(context, continueOnError, timeoutInMinutes, cancelTimeoutInMinutes, jobFactory, options); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_demands?.Count == 0) + { + m_demands = null; + } + } + + [DataMember(Name = "Demands", 
EmitDefaultValue = false)] + private ISet m_demands; + } + + internal sealed class PhaseTargetJsonConverter : VssSecureJsonConverter + { + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Boolean CanConvert(Type objectType) + { + return typeof(PhaseTarget).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + PhaseTargetType? targetType = null; + JObject value = JObject.Load(reader); + if (!value.TryGetValue("Type", StringComparison.OrdinalIgnoreCase, out JToken targetTypeValue)) + { + return existingValue; + } + else + { + if (targetTypeValue.Type == JTokenType.Integer) + { + targetType = (PhaseTargetType)(Int32)targetTypeValue; + } + else if (targetTypeValue.Type == JTokenType.String) + { + PhaseTargetType parsedType; + if (Enum.TryParse((String)targetTypeValue, ignoreCase: true, result: out parsedType)) + { + targetType = parsedType; + } + } + } + + if (targetType == null) + { + return existingValue; + } + + Object newValue = null; + switch (targetType) + { + case PhaseTargetType.DeploymentGroup: + newValue = new DeploymentGroupTarget(); + break; + + case PhaseTargetType.Server: + newValue = new ServerTarget(); + break; + + case PhaseTargetType.Queue: + newValue = new AgentQueueTarget(); + break; + + case PhaseTargetType.Pool: + newValue = new AgentPoolTarget(); + break; + } + + if (value != null) + { + using (JsonReader objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, newValue); + } + } + + return newValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PhaseTargetType.cs b/src/Sdk/DTPipelines/Pipelines/PhaseTargetType.cs new file mode 100644 index 00000000000..3163e0b93c5 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PhaseTargetType.cs @@ -0,0 +1,24 @@ +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [ClientIgnore] + [EditorBrowsable(EditorBrowsableState.Never)] + public enum PhaseTargetType + { + [EnumMember] + Queue, + + [EnumMember] + Server, + + [EnumMember] + DeploymentGroup, + + [EnumMember] + Pool, + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineBuildContext.cs b/src/Sdk/DTPipelines/Pipelines/PipelineBuildContext.cs new file mode 100644 index 00000000000..828bf64ac99 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineBuildContext.cs @@ -0,0 +1,182 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.Pipelines.Validation; +using GitHub.DistributedTask.WebApi; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class PipelineBuildContext : PipelineContextBase + { + public PipelineBuildContext() + : this(new BuildOptions()) + { + } + + public PipelineBuildContext( + IPipelineContext context, + BuildOptions options) + : base(context) + { + m_buildOptions = options ?? 
new BuildOptions(); + } + + public PipelineBuildContext( + BuildOptions buildOptions, + DictionaryContextData data = null, + ICounterStore counterStore = null, + IResourceStore resourceStore = null, + IList stepProviders = null, + ITaskStore taskStore = null, + IPackageStore packageStore = null, + IInputValidator inputValidator = null, + IPipelineTraceWriter trace = null, + EvaluationOptions expressionOptions = null, + IList phaseProviders = null) + : base(data, counterStore, packageStore, resourceStore, taskStore, stepProviders, null, trace, expressionOptions) + { + m_buildOptions = buildOptions ?? new BuildOptions(); + m_inputValidator = inputValidator; + m_phaseProviders = phaseProviders; + } + + public BuildOptions BuildOptions + { + get + { + return m_buildOptions; + } + } + + public IInputValidator InputValidator + { + get + { + return m_inputValidator; + } + } + + public IReadOnlyList PhaseProviders + { + get + { + return m_phaseProviders.ToList(); + } + } + + internal ValidationResult Validate(PipelineProcess process) + { + var result = new ValidationResult(); + + // If requested to do so, validate the container resource to ensure it was specified properly. This will + // also handle endpoint authorization if the container requires access to a docker registry. + if (this.ResourceStore != null) + { + foreach (var container in this.ResourceStore.Containers.GetAll().Where(x => x.Endpoint != null)) + { + result.ReferencedResources.AddEndpointReference(container.Endpoint); + + if (this.BuildOptions.ValidateResources) + { + var endpoint = this.ResourceStore.GetEndpoint(container.Endpoint); + if (endpoint == null) + { + result.UnauthorizedResources.AddEndpointReference(container.Endpoint); + result.Errors.Add(new PipelineValidationError(PipelineStrings.ServiceEndpointNotFound(container.Endpoint))); + } + else + { + if (!endpoint.Type.Equals(ServiceEndpointTypes.Docker, StringComparison.OrdinalIgnoreCase)) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.ContainerResourceInvalidRegistryEndpointType(container.Alias, endpoint.Type, endpoint.Name))); + } + else + { + container.Endpoint = new ServiceEndpointReference + { + Id = endpoint.Id, + }; + } + } + } + } + + foreach (var repository in this.ResourceStore.Repositories.GetAll()) + { + var expandedProperties = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var property in repository.Properties.GetItems()) + { + expandedProperties[property.Key] = this.ExpandVariables(property.Value); + } + + foreach (var expandedProperty in expandedProperties) + { + repository.Properties.Set(expandedProperty.Key, expandedProperty.Value); + } + } + + if (this.EnvironmentVersion > 1) + { + // always add self or designer repo to repository list + RepositoryResource defaultRepo = null; + var selfRepo = this.ResourceStore.Repositories.Get(PipelineConstants.SelfAlias); + if (selfRepo == null) + { + var designerRepo = this.ResourceStore.Repositories.Get(PipelineConstants.DesignerRepo); + if (designerRepo != null) + { + defaultRepo = designerRepo; + } + else + { + System.Diagnostics.Debug.Fail("Repositories are not filled in."); + } + } + else + { + defaultRepo = selfRepo; + } + + if (defaultRepo != null) + { + result.ReferencedResources.Repositories.Add(defaultRepo); + + // Add the endpoint + if (defaultRepo.Endpoint != null) + { + result.ReferencedResources.AddEndpointReference(defaultRepo.Endpoint); + + if (this.BuildOptions.ValidateResources) + { + var repositoryEndpoint = this.ResourceStore.GetEndpoint(defaultRepo.Endpoint); + 
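// A missing endpoint is tracked as an unauthorized reference in addition to producing a validation error. +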
if (repositoryEndpoint == null) + { + result.UnauthorizedResources?.AddEndpointReference(defaultRepo.Endpoint); + result.Errors.Add(new PipelineValidationError(PipelineStrings.ServiceEndpointNotFound(defaultRepo.Endpoint))); + } + else + { + defaultRepo.Endpoint = new ServiceEndpointReference() { Id = repositoryEndpoint.Id }; + } + } + } + } + } + } + + // Validate the graph of stages + GraphValidator.Validate(this, result, PipelineStrings.StageNameWhenNoNameIsProvided, null, process.Stages, Stage.GetErrorMessage); + + return result; + } + + private IInputValidator m_inputValidator; + private IList m_phaseProviders; + private BuildOptions m_buildOptions; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineBuildResult.cs b/src/Sdk/DTPipelines/Pipelines/PipelineBuildResult.cs new file mode 100644 index 00000000000..817946bc699 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineBuildResult.cs @@ -0,0 +1,73 @@ +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Pipelines.Validation; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class PipelineBuildResult + { + public PipelineBuildResult( + PipelineEnvironment environment, + PipelineProcess process, + ValidationResult result) + { + this.Environment = environment; + this.Process = process; + m_validationResult = result; + } + + /// + /// Gets the environment which was generated by the builder. + /// + public PipelineEnvironment Environment + { + get; + private set; + } + + /// + /// Gets the process which was generated by the builder. + /// + public PipelineProcess Process + { + get; + private set; + } + + /// + /// Gets the list of errors which occurred while building the pipeline. + /// + public IList Errors + { + get + { + return m_validationResult.Errors; + } + } + + /// + /// Gets the set of all resources which are referenced by the pipeline. + /// + public PipelineResources ReferencedResources + { + get + { + return m_validationResult.ReferencedResources; + } + } + + /// + /// Gets the set of unauthorized resources, if any, which are referenced by the pipeline. + /// + public PipelineResources UnauthorizedResources + { + get + { + return m_validationResult.UnauthorizedResources; + } + } + + private readonly ValidationResult m_validationResult; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineBuilder.cs b/src/Sdk/DTPipelines/Pipelines/PipelineBuilder.cs new file mode 100644 index 00000000000..40095be9b40 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineBuilder.cs @@ -0,0 +1,411 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Linq; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechanism for customization of pipeline construction by different hosting environments. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public class PipelineBuilder + { + /// + /// Initializes a new PipelineBuilder instance with default service implementations. 
+ /// + public PipelineBuilder() + : this(null, null, null, null, null, null, null, null, null) + { + } + + /// + /// Initilaizes a new PipelineBuilder instance with the specified services used for resolution of + /// resources available to pipelines. + /// + /// The resources available for use within a pipeline + /// The tasks available for use within a pipeline + /// The secret stores available for use within a pipeline + public PipelineBuilder( + EvaluationOptions expressionOptions = null, + ICounterStore counterStore = null, + IPackageStore packageStore = null, + IResourceStore resourceStore = null, + IList stepProviders = null, + ITaskStore taskStore = null, + ITaskTemplateStore templateStore = null, + IPipelineIdGenerator idGenerator = null, + IList phaseProviders = null) + : this(new PipelineContextBuilder(counterStore, packageStore, resourceStore, stepProviders, taskStore, idGenerator, expressionOptions, phaseProviders), templateStore) + { + } + + /// + /// Initializes a new PipelineBuilder instance for the specified context. + /// + /// The context which should be used for validation + internal PipelineBuilder(IPipelineContext context) + : this(new PipelineContextBuilder(context)) + { + } + + private PipelineBuilder( + PipelineContextBuilder contextBuilder, + ITaskTemplateStore templateStore = null) + { + ArgumentUtility.CheckForNull(contextBuilder, nameof(contextBuilder)); + + m_contextBuilder = contextBuilder; + m_templateStore = templateStore; + } + + /// + /// Gets or sets the default queue which is assigned automatically to phases with no target and existing agent + /// queue targets without a queue specified. + /// + public AgentQueueReference DefaultQueue + { + get; + set; + } + + /// + /// Gets or sets the default agent specification which is assigned automatically to phases with no target and existing agent + /// queue targets without an agent specification specified. + /// + public JObject DefaultAgentSpecification + { + get; + set; + } + + /// + /// Gets or sets the default checkout options which are propagated to all repository resources defined + /// within a pipeline if explicit options are not provided at checkout. + /// + public CheckoutOptions DefaultCheckoutOptions + { + get; + set; + } + + /// + /// Gets or sets the default workspace options which are propagated to all agent jobs which do not explicitly + /// define overrides. + /// + public WorkspaceOptions DefaultWorkspaceOptions + { + get; + set; + } + + /// + /// Gets or sets the environment version which should be used. + /// + public Int32 EnvironmentVersion + { + get + { + return m_contextBuilder.EnvironmentVersion; + } + set + { + m_contextBuilder.EnvironmentVersion = value; + } + } + + /// + /// Gets the counter store configured for this builder. + /// + public ICounterStore CounterStore => m_contextBuilder.CounterStore; + + /// + /// Gets the ID generator configured for this builder. + /// + public IPipelineIdGenerator IdGenerator => m_contextBuilder.IdGenerator; + + /// + /// Gets the package store configured for this builder. + /// + public IPackageStore PackageStore => m_contextBuilder.PackageStore; + + /// + /// Gets the resource store configured for this builder. + /// + public IResourceStore ResourceStore => m_contextBuilder.ResourceStore; + + /// + /// Gets the user variables which have been set for this builder. + /// + public IList UserVariables => m_contextBuilder.UserVariables; + + /// + /// Gets the system variables which have been set for this builder. 
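The Default* properties above only take effect while the process is built: DefaultQueue and a deep clone of DefaultAgentSpecification are assigned to agent-queue targets that do not name a queue, and DefaultCheckoutOptions is cloned onto repositories without explicit checkout options. A hedged sketch of wiring them up, assuming AgentQueueReference and CheckoutOptions expose the parameterless constructors and setters used here; the queue id and the "vmImage" key are purely illustrative:

```csharp
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;   // assumed home of AgentQueueReference / CheckoutOptions
using Newtonsoft.Json.Linq;

static class BuilderDefaults
{
    static PipelineBuilder CreateBuilderWithDefaults()
    {
        return new PipelineBuilder
        {
            // Hypothetical pool id; cloned onto queue targets that leave Queue unset.
            DefaultQueue = new AgentQueueReference { Id = 1 },

            // Deep-cloned onto targets that fall back to the default queue and have no
            // agent specification of their own; the "vmImage" key is illustrative only.
            DefaultAgentSpecification = JObject.FromObject(new { vmImage = "ubuntu-latest" }),

            // Cloned onto every repository resource that carries no checkout options.
            DefaultCheckoutOptions = new CheckoutOptions(),
        };
    }
}
```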
+ /// + public IDictionary SystemVariables => m_contextBuilder.SystemVariables; + + /// + /// Builds a new PipelineProcess instance using the specified environment. + /// + /// A list of phases which should be used to build the process + /// The validation which should be performed on the pipeline + /// A PipelineBuildResult which contains a runnable pipeline process + public PipelineBuildResult Build( + IList phases, + BuildOptions options = null) + { + ArgumentUtility.CheckEnumerableForEmpty(phases, nameof(phases)); + + options = options ?? BuildOptions.None; + + var stage = new Stage + { + Name = PipelineConstants.DefaultJobName, + }; + + stage.Phases.AddRange(phases); + return Build(new[] { stage }, options); + } + + /// + /// Builds a new PipelineProcess instance. + /// + /// A list of stages which should be used to build the process + /// The validation which should be performed on the pipeline + /// A PipelineBuildResult which contains a runnable pipeline process + public PipelineBuildResult Build( + IList stages, + BuildOptions options = null, + TemplateToken environmentVariables = null) + { + var context = CreateBuildContext(options); + + if (this.DefaultCheckoutOptions != null) + { + foreach (var repository in context.ResourceStore.Repositories.GetAll()) + { + if (!repository.Properties.TryGetValue(RepositoryPropertyNames.CheckoutOptions, out _)) + { + repository.Properties.Set(RepositoryPropertyNames.CheckoutOptions, this.DefaultCheckoutOptions.Clone()); + } + } + } + + // First gather any validation errors and all referenced resources + var process = CreateProcess(context, stages); + var result = context.Validate(process); + + // Output the environment + var environment = result.Environment = new PipelineEnvironment(); + environment.Version = context.EnvironmentVersion; + environment.Counters.AddRange(context.CounterStore.Counters); + environment.Resources.MergeWith(context.ResourceStore.GetAuthorizedResources()); + environment.UserVariables.AddRange(m_contextBuilder.UserVariables); + environment.SystemVariables.AddRange(m_contextBuilder.SystemVariables); + environment.EnvironmentVariables = environmentVariables; + + return new PipelineBuildResult(result.Environment, process, result); + } + + public PipelineBuildContext CreateBuildContext( + BuildOptions options, + Boolean includeSecrets = false) + { + return m_contextBuilder.CreateBuildContext(options, this.PackageStore, includeSecrets); + } + + public PhaseExecutionContext CreatePhaseExecutionContext( + StageInstance stage, + PhaseInstance phase, + PipelineState state = PipelineState.InProgress, + DictionaryContextData data = null, + Boolean includeSecrets = false, + IPipelineTraceWriter trace = null, + ExecutionOptions executionOptions = null) + { + return m_contextBuilder.CreatePhaseExecutionContext(stage, phase, state, data, includeSecrets, trace, executionOptions); + } + + public StageExecutionContext CreateStageExecutionContext( + StageInstance stage, + PipelineState state = PipelineState.InProgress, + DictionaryContextData data = null, + Boolean includeSecrets = false, + IPipelineTraceWriter trace = null, + ExecutionOptions executionOptions = null) + { + return m_contextBuilder.CreateStageExecutionContext(stage, state, data, includeSecrets, trace, executionOptions); + } + + public IList Validate( + PipelineProcess process, + BuildOptions options = null) + { + ArgumentUtility.CheckForNull(process, nameof(process)); + + var context = CreateBuildContext(options); + return context.Validate(process).Errors; + } + + public 
IList Validate( + IList steps, + PhaseTarget target = null, + BuildOptions options = null) + { + ArgumentUtility.CheckForNull(steps, nameof(steps)); + + var phase = new Phase(); + phase.Steps.AddRange(steps); + phase.Target = target; + + var stage = new Stage(PipelineConstants.DefaultJobName, new[] { phase }); + + var context = CreateBuildContext(options); + var process = CreateProcess(context, new[] { stage }); + return context.Validate(process).Errors; + } + + public PipelineResources GetReferenceResources( + IList steps, + PhaseTarget target = null) + { + ArgumentUtility.CheckForNull(steps, nameof(steps)); + + var phase = new Phase(); + phase.Steps.AddRange(steps); + phase.Target = target; + + var stage = new Stage(PipelineConstants.DefaultJobName, new[] { phase }); + + var context = CreateBuildContext(new BuildOptions()); + var process = CreateProcess(context, new[] { stage }); + return context.Validate(process).ReferencedResources; + } + + private PipelineProcess CreateProcess( + PipelineBuildContext context, + IList stages) + { + ArgumentUtility.CheckForNull(context, nameof(context)); + ArgumentUtility.CheckEnumerableForEmpty(stages, nameof(stages)); + + // Now inject the tasks into each appropriate phase + foreach (var stage in stages) + { + foreach (var phaseNode in stage.Phases) + { + // Set the default target to be a queue target and leave it up to the hosting environment to + // specify a default in the resource store. + if (phaseNode.Target == null) + { + phaseNode.Target = new AgentQueueTarget(); + } + + // Agent queues are the default target type + if (phaseNode.Target.Type == PhaseTargetType.Queue && this.DefaultQueue != null) + { + var queueTarget = phaseNode.Target as AgentQueueTarget; + var useDefault = false; + var queue = queueTarget.Queue; + if (queue == null) + { + useDefault = true; + } + else if (queue.Id == 0) + { + var name = queue.Name; + if (name == null || (name.IsLiteral && String.IsNullOrEmpty(name.Literal))) + { + useDefault = true; + } + } + + if (useDefault) + { + queueTarget.Queue = this.DefaultQueue.Clone(); + if (queueTarget.AgentSpecification == null) + { + queueTarget.AgentSpecification = (JObject)DefaultAgentSpecification?.DeepClone(); + } + } + } + + // Set default workspace options + if (phaseNode.Target.Type == PhaseTargetType.Queue && this.DefaultWorkspaceOptions != null) + { + var queueTarget = phaseNode.Target as AgentQueueTarget; + if (queueTarget.Workspace == null) + { + queueTarget.Workspace = this.DefaultWorkspaceOptions.Clone(); + } + } + + var steps = default(IList); + if (phaseNode.Type == PhaseType.Phase) + { + steps = (phaseNode as Phase).Steps; + } + else if (phaseNode.Type == PhaseType.JobFactory) + { + steps = (phaseNode as JobFactory).Steps; + } + + if (steps != null) + { + var resolvedSteps = new List(); + foreach (var step in steps.Where(x => x.Enabled)) + { + if (step.Type == StepType.Task) + { + var taskStep = step as TaskStep; + resolvedSteps.Add(taskStep); + } + else if (step.Type == StepType.Group) + { + var taskStepGroup = step as GroupStep; + resolvedSteps.Add(taskStepGroup); + } + else if (step.Type == StepType.Action) + { + var actionStep = step as ActionStep; + resolvedSteps.Add(actionStep); + } + else if (step.Type == StepType.TaskTemplate) + { + var templateStep = step as TaskTemplateStep; + if (m_templateStore == null) + { + throw new ArgumentException(PipelineStrings.TemplateStoreNotProvided(templateStep.Name, nameof(ITaskTemplateStore))); + } + + var resolvedTasks = m_templateStore.ResolveTasks(templateStep); + 
resolvedSteps.AddRange(resolvedTasks); + } + else + { + // We should never be here. + Debug.Fail(step.Type.ToString()); + } + } + + steps.Clear(); + steps.AddRange(resolvedSteps); + } + } + } + + return new PipelineProcess(stages); + } + + private readonly ITaskTemplateStore m_templateStore; + private readonly PipelineContextBuilder m_contextBuilder; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineConstants.cs b/src/Sdk/DTPipelines/Pipelines/PipelineConstants.cs new file mode 100644 index 00000000000..323f0fee5f0 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineConstants.cs @@ -0,0 +1,221 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.WebApi; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides constant values for constructs used in the pipeline APIs. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public static class PipelineConstants + { + /// + /// The minimum agent version when performing an advanced checkout. This demand + /// is required when multiple checkout steps are used, when the checkout step + /// is not the first step, or when any repository is checked out other than self + /// or none. + /// + public static readonly String AdvancedCheckoutMinAgentVersion = "2.137.0"; + + public static readonly String AgentVersionDemandName = "Runner.Version"; + + public static readonly String AgentName = "Agent.Name"; + + /// + /// The default job cancel timeout in minutes. + /// + public static readonly Int32 DefaultJobCancelTimeoutInMinutes = 5; + + /// + /// The default job name. This job name is used when a phase does not leverage multipliers + /// or slicing and only has one implicit job. + /// + public static readonly String DefaultJobName = "__default"; + + /// + /// The default job display name. For when the user doesn't specify names for anything. + /// + public static readonly String DefaultJobDisplayName = "Job"; + + /// + /// The default job timeout in minutes. + /// + public static readonly Int32 DefaultJobTimeoutInMinutes = 360; + + /// + /// The max length for a node within a pipeline - e.g. a stage name or a job name. + /// + public static readonly Int32 MaxNodeNameLength = 100; + + /// + /// The repository alias to use for dont-sync-sources. + /// + public static readonly String NoneAlias = "none"; + + /// + /// Alias for the self repository. + /// + public static readonly String SelfAlias = "self"; + + /// + /// Alias for the repository coming from designer build definition. + /// + public static readonly String DesignerRepo = "__designer_repo"; + + /// + /// Error code during graph validation. + /// + internal const String DependencyNotFound = nameof(DependencyNotFound); + + /// + /// Error code during graph validation. + /// + internal const String GraphContainsCycle = nameof(GraphContainsCycle); + + /// + /// Error code during graph validation. + /// + internal const String NameInvalid = nameof(NameInvalid); + + /// + /// Error code during graph validation. + /// + internal const String NameNotUnique = nameof(NameNotUnique); + + /// + /// Error code during graph validation. 
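The Validate(steps, target) and GetReferenceResources overloads on PipelineBuilder above wrap the supplied steps in a single phase under PipelineConstants.DefaultJobName and run the normal validation path, which makes them a cheap pre-flight check for user-authored steps. A sketch under the assumption that the stripped generic parameters are IList<Step> and IList<PipelineValidationError>:

```csharp
using System.Collections.Generic;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.Validation;   // assumed home of PipelineValidationError

static class StepPreflight
{
    // Returns true when the steps validate cleanly against the builder's configured stores.
    static bool TryValidate(
        PipelineBuilder builder,
        IList<Step> steps,
        out IList<PipelineValidationError> errors)
    {
        errors = builder.Validate(steps);   // target and BuildOptions fall back to their defaults
        return errors.Count == 0;
    }
}
```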
+ /// + internal const String StartingPointNotFound = nameof(StartingPointNotFound); + + internal const String CheckpointNodeInstanceNameClaimKey = "nodeInstanceName"; + internal const String CheckpointIdClaimKey = "checkpointId"; + + public static class CheckoutTaskInputs + { + public static readonly String Repository = "repository"; + public static readonly String Ref = "ref"; + public static readonly String Version = "version"; + public static readonly String Token = "token"; + public static readonly String Clean = "clean"; + public static readonly String Submodules = "submodules"; + public static readonly String Lfs = "lfs"; + public static readonly String FetchDepth = "fetchDepth"; + public static readonly String PersistCredentials = "persistCredentials"; + public static readonly String Path = "path"; + public static readonly String WorkspaceRepo = "workspaceRepo"; + + public static class SubmodulesOptions + { + public static readonly String Recursive = "recursive"; + public static readonly String True = "true"; + } + } + + public static class WorkspaceCleanOptions + { + public static readonly String Outputs = "outputs"; + public static readonly String Resources = "resources"; + public static readonly String All = "all"; + } + + public static class EnvironmentVariables + { + public static readonly String EnvironmentId = "Environment.Id"; + public static readonly String EnvironmentName = "Environment.Name"; + public static readonly String EnvironmentResourceId = "Environment.ResourceId"; + public static readonly String EnvironmentResourceName = "Environment.ResourceName"; + } + + public static readonly TaskDefinition CheckoutTask = new TaskDefinition + { + Id = new Guid("6d15af64-176c-496d-b583-fd2ae21d4df4"), + Name = "Checkout", + FriendlyName = "Get sources", + Author = "Microsoft", + RunsOn = { TaskRunsOnConstants.RunsOnAgent }, + Version = new TaskVersion("1.0.0"), + Description = "Get sources from a repository. 
Supports Git, TfsVC, and SVN repositories.", + HelpMarkDown = "[More Information](https://github.com)", + Inputs = { + new TaskInputDefinition() + { + Name = CheckoutTaskInputs.Repository, + Required = true, + InputType = TaskInputType.Repository + }, + new TaskInputDefinition() + { + Name = CheckoutTaskInputs.Clean, + Required = false, + DefaultValue = Boolean.TrueString, + InputType = TaskInputType.Boolean + }, + // Git + new TaskInputDefinition() + { + Name = CheckoutTaskInputs.Submodules, // True or Recursive + Required = false, + InputType = TaskInputType.String + }, + new TaskInputDefinition() + { + Name = CheckoutTaskInputs.Lfs, // Checkout lfs object + Required = false, + DefaultValue = Boolean.FalseString, + InputType = TaskInputType.Boolean + }, + new TaskInputDefinition() + { + Name = CheckoutTaskInputs.FetchDepth, // Enable shallow fetch + Required = false, + InputType = TaskInputType.String + }, + new TaskInputDefinition() + { + Name = CheckoutTaskInputs.PersistCredentials, // Allow script git + Required = false, + DefaultValue = Boolean.FalseString, + InputType = TaskInputType.Boolean + }, + }, + Execution = + { + { + "agentPlugin", + JObject.FromObject(new Dictionary(){ { "target", "Agent.Plugins.Repository.CheckoutTask, Agent.Plugins"} }) + } + }, + PostJobExecution = + { + { + "agentPlugin", + JObject.FromObject(new Dictionary(){ { "target", "Agent.Plugins.Repository.CleanupTask, Agent.Plugins"} }) + } + } + }; + + public static class ScriptStepInputs + { + public static readonly String Script = "script"; + public static readonly String WorkingDirectory = "workingDirectory"; + public static readonly String Shell = "shell"; + } + + public static Boolean IsCheckoutTask(this Step step) + { + if (step is TaskStep task && + task.Reference.Id == PipelineConstants.CheckoutTask.Id && + task.Reference.Version == PipelineConstants.CheckoutTask.Version) + { + return true; + } + else + { + return false; + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineContextBase.cs b/src/Sdk/DTPipelines/Pipelines/PipelineContextBase.cs new file mode 100644 index 00000000000..4daecff3f76 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineContextBase.cs @@ -0,0 +1,439 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Logging; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides base functionality for all contexts used during build and execution if a pipeline. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class PipelineContextBase : IPipelineContext + { + private protected PipelineContextBase(IPipelineContext context) + { + this.EnvironmentVersion = context.EnvironmentVersion; + this.SystemVariableNames.UnionWith(context.SystemVariableNames); + this.Variables.AddRange(context.Variables); + m_referencedResources = context.ReferencedResources?.Clone(); + + this.CounterStore = context.CounterStore; + this.IdGenerator = context.IdGenerator ?? new PipelineIdGenerator(); + this.PackageStore = context.PackageStore; + this.ResourceStore = context.ResourceStore; + this.TaskStore = context.TaskStore; + this.Trace = context.Trace; + m_secretMasker = new Lazy(() => CreateSecretMasker()); + + // This is a copy, don't dynamically set pipeline decorators, they are already set. 
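The IsCheckoutTask extension defined on PipelineConstants above identifies a step as the well-known checkout task purely by the task reference's Id and Version, so post-processing code can special-case checkout without string matching. A minimal sketch; the IEnumerable<Step> element type is an assumption because generic arguments are stripped in this diff:

```csharp
using System.Collections.Generic;
using System.Linq;
using GitHub.DistributedTask.Pipelines;

static class CheckoutInspection
{
    // True when the job declares its own checkout step; a caller might otherwise inject
    // an implicit checkout of the 'self' repository (PipelineConstants.SelfAlias).
    static bool HasExplicitCheckout(IEnumerable<Step> steps)
    {
        return steps.Any(step => step.IsCheckoutTask());
    }
}
```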
+ this.StepProviders = context.StepProviders; + + if (context.Data?.Count > 0) + { + m_data = new DictionaryContextData(); + foreach (var pair in context.Data) + { + m_data[pair.Key] = pair.Value; + } + } + } + + private protected PipelineContextBase( + DictionaryContextData data, + ICounterStore counterStore, + IPackageStore packageStore, + IResourceStore resourceStore, + ITaskStore taskStore, + IList stepProviders, + IPipelineIdGenerator idGenerator = null, + IPipelineTraceWriter trace = null, + EvaluationOptions expressionOptions = null) + { + m_data = data; + this.CounterStore = counterStore; + this.ExpressionOptions = expressionOptions ?? new EvaluationOptions(); + this.IdGenerator = idGenerator ?? new PipelineIdGenerator(); + this.PackageStore = packageStore; + this.ResourceStore = resourceStore; + this.TaskStore = taskStore; + this.Trace = trace; + m_secretMasker = new Lazy(() => CreateSecretMasker()); + + // Setup pipeline decorators + var aggregatedStepProviders = new List(); + + // Add resources first + if (this.ResourceStore != null) + { + aggregatedStepProviders.Add(this.ResourceStore); + } + + // Add custom pipeline decorators + if (stepProviders != null) + { + aggregatedStepProviders.AddRange(stepProviders); + } + + this.StepProviders = aggregatedStepProviders; + } + + /// + /// Gets the counter store for the current context + /// + public ICounterStore CounterStore + { + get; + } + + // Gets the available context when evaluating expressions + public DictionaryContextData Data + { + get + { + if (m_data == null) + { + m_data = new DictionaryContextData(); + } + return m_data; + } + } + + /// + /// Gets or sets the version of the environment + /// + public Int32 EnvironmentVersion + { + get; + internal set; + } + + /// + /// Gets the expression evaluation options for this context. + /// + public EvaluationOptions ExpressionOptions + { + get; + } + + /// + /// Gets the id generator configured for this context. + /// + public IPipelineIdGenerator IdGenerator + { + get; + } + + /// + /// Gets the package store configured for this context. + /// + public IPackageStore PackageStore + { + get; + } + + /// + /// Gets the resources referenced within this context. 
+ /// + public PipelineResources ReferencedResources + { + get + { + if (m_referencedResources == null) + { + m_referencedResources = new PipelineResources(); + } + return m_referencedResources; + } + } + + /// + /// Gets the resource store for the current context + /// + public IResourceStore ResourceStore + { + get; + } + + /// + /// Gets the step providers for the current context + /// + public IReadOnlyList StepProviders + { + get; + } + + /// + /// Gets the secret masker for the current context + /// + public ISecretMasker SecretMasker + { + get + { + return m_secretMasker.Value; + } + } + + /// + /// Gets the task store for the current context + /// + public ITaskStore TaskStore + { + get; + } + + /// + /// Gets the trace for the current context + /// + public IPipelineTraceWriter Trace + { + get; + } + + /// + /// Gets the system variable names for the current context + /// + public ISet SystemVariableNames + { + get + { + if (m_systemVariables == null) + { + m_systemVariables = new HashSet(StringComparer.OrdinalIgnoreCase); + } + return m_systemVariables; + } + } + + /// + /// Gets the variables configured on the context + /// + public IDictionary Variables + { + get + { + if (m_variables == null) + { + m_variables = new VariablesDictionary(); + } + return m_variables; + } + } + + /// + /// Gets a value indicating if secret variables have been accessed + /// + protected virtual Boolean SecretsAccessed + { + get + { + return m_variables?.SecretsAccessed.Count > 0; + } + } + + public virtual ISecretMasker CreateSecretMasker() + { + var secretMasker = new SecretMasker(); + + // Add variable secrets + if (m_variables?.Count > 0) + { + foreach (var variable in m_variables.Values.Where(x => x.IsSecret)) + { + secretMasker.AddValue(variable.Value); + } + } + + return secretMasker; + } + + /// + /// Expand macros of the format $(variableName) using the current context. + /// + /// The value which contains macros to expand + /// True if secrets should be replaced with '***'; otherwise, false + /// The evaluated value with all defined macros expanded to the value in the current context + public String ExpandVariables( + String value, + Boolean maskSecrets = false) + { + if (!String.IsNullOrEmpty(value) && m_variables?.Count > 0) + { + return VariableUtility.ExpandVariables(value, m_variables, false, maskSecrets); + } + else + { + return value; + } + } + + /// + /// Expand macros of the format $(variableName) using the current context. + /// + /// The JToken value which contains macros to expand + /// The evaluated value with all defined macros expanded to the value in the current context + public JToken ExpandVariables(JToken value) + { + if (value != null && m_variables?.Count > 0) + { + return VariableUtility.ExpandVariables(value, m_variables, false); + } + else + { + return value; + } + } + + /// + /// Expand all variables and macros present as values (not keys) in a given JObject. + /// Conditionally record unresolved expressions or macros as errors. 
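The ExpandVariables overloads above substitute $(name) macros from the context's Variables dictionary, and per the parameter comment secrets come back as '***' when maskSecrets is true. A small sketch, assuming Variables is IDictionary<String, VariableValue> (generic arguments are stripped in this diff) and that VariableValue lives in GitHub.DistributedTask.WebApi:

```csharp
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;   // assumed namespace of VariableValue

static class MacroExpansion
{
    static string ExpandOutputPath(PipelineBuildContext context)
    {
        // PipelineBuildContext ultimately derives from PipelineContextBase, so Variables
        // and ExpandVariables are available; the variable name and value are illustrative.
        context.Variables["configuration"] = new VariableValue("Release", false);

        // With maskSecrets: true, any secret variable referenced here would be rendered as '***'.
        return context.ExpandVariables("bin/$(configuration)", maskSecrets: false);
    }
}
```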
+ /// + public ExpressionResult Evaluate(JObject value) + { + if (value == null) + { + return null; + } + + var containsSecrets = false; + String ResolveExpression(String s) + { + if (!ExpressionValue.IsExpression(s)) + { + return s; + } + + String resolvedValue = null; + try + { + var expressionResult = Evaluate(ExpressionValue.TrimExpression(s)); + containsSecrets |= expressionResult.ContainsSecrets; + resolvedValue = expressionResult.Value; + } + catch (ExpressionException) + { + return s; + } + + if (!String.IsNullOrEmpty(resolvedValue)) + { + return resolvedValue; + } + + return s; + } + + // recurse through object + var resolvedSpec = new JObject(); + foreach (var pair in value) + { + var v = pair.Value; + switch (v.Type) + { + case JTokenType.Object: + // recurse + var expressionResult = Evaluate(v.Value()); + containsSecrets |= expressionResult.ContainsSecrets; + resolvedSpec[pair.Key] = expressionResult.Value; + break; + case JTokenType.String: + // resolve + resolvedSpec[pair.Key] = ExpandVariables(ResolveExpression(v.Value())); + break; + default: + // no special handling + resolvedSpec[pair.Key] = v; + break; + } + } + + return new ExpressionResult(resolvedSpec, containsSecrets); + } + + /// + /// Evalutes the provided expression using the current context. + /// + /// The type of result expected + /// The expression string to evaluate + /// A value indicating the evaluated result and whether or not secrets were accessed during evaluation + public ExpressionResult Evaluate(String expression) + { + if (!m_parsedExpressions.TryGetValue(expression, out IExpressionNode parsedExpression)) + { + parsedExpression = new ExpressionParser().CreateTree(expression, this.Trace, this.GetSupportedNamedValues(), this.GetSupportedFunctions()); + m_parsedExpressions.Add(expression, parsedExpression); + } + + this.ResetSecretsAccessed(); + var evaluationResult = parsedExpression.Evaluate(this.Trace, this.SecretMasker, this, this.ExpressionOptions); + throw new NotSupportedException(); + } + + protected virtual void ResetSecretsAccessed() + { + m_variables?.SecretsAccessed.Clear(); + } + + protected virtual IEnumerable GetSupportedFunctions() + { + return Enumerable.Empty(); + } + + protected virtual IEnumerable GetSupportedNamedValues() + { + return Enumerable.Empty(); + } + + internal void SetSystemVariables(IEnumerable variables) + { + foreach (var variable in variables) + { + this.SystemVariableNames.Add(variable.Name); + this.Variables[variable.Name] = new VariableValue(variable.Value, variable.Secret); + } + } + + internal void SetUserVariables(IEnumerable variables) + { + foreach (var variable in variables.Where(x=>!x.Name.StartsWith("system.", StringComparison.OrdinalIgnoreCase) && !this.SystemVariableNames.Contains(x.Name))) + { + this.Variables[variable.Name] = new VariableValue(variable.Value, variable.Secret); + } + } + + internal void SetUserVariables(IDictionary variables) + { + // Do not allow user variables to override system variables which were set at a higher scope. In this case + // the system variable should always win rather than the most specific variable winning. 
+ foreach (var variable in variables.Where(x => !x.Key.StartsWith("system.", StringComparison.OrdinalIgnoreCase) && !this.SystemVariableNames.Contains(x.Key))) + { + this.Variables[variable.Key] = variable.Value; + } + } + + internal void SetSystemVariables(IDictionary variables) + { + if (variables?.Count > 0) + { + foreach (var variable in variables) + { + this.SystemVariableNames.Add(variable.Key); + this.Variables[variable.Key] = variable.Value?.Clone(); + } + } + } + + private DictionaryContextData m_data; + private VariablesDictionary m_variables; + private HashSet m_systemVariables; + private Lazy m_secretMasker; + private PipelineResources m_referencedResources; + private Dictionary m_parsedExpressions = new Dictionary(StringComparer.OrdinalIgnoreCase); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineContextBuilder.cs b/src/Sdk/DTPipelines/Pipelines/PipelineContextBuilder.cs new file mode 100644 index 00000000000..79f3939a0d3 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineContextBuilder.cs @@ -0,0 +1,562 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.Pipelines.Validation; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + internal sealed class PipelineContextBuilder + { + public PipelineContextBuilder() + : this(null, null, null, null, null) + { + } + + public PipelineContextBuilder( + ICounterStore counterStore = null, + IPackageStore packageStore = null, + IResourceStore resourceStore = null, + IList stepProviders = null, + ITaskStore taskStore = null, + IPipelineIdGenerator idGenerator = null, + EvaluationOptions expressionOptions = null, + IList phaseProviders = null) + { + this.EnvironmentVersion = 2; + this.CounterStore = counterStore ?? new CounterStore(); + this.IdGenerator = idGenerator ?? new PipelineIdGenerator(); + this.PackageStore = packageStore ?? new PackageStore(); + this.ResourceStore = resourceStore ?? new ResourceStore(); + this.StepProviders = stepProviders ?? new List(); + this.TaskStore = taskStore ?? new TaskStore(); + this.ExpressionOptions = expressionOptions ?? new EvaluationOptions(); + this.PhaseProviders = phaseProviders ?? new List(); + } + + internal PipelineContextBuilder(IPipelineContext context) + : this(context.CounterStore, context.PackageStore, context.ResourceStore, context.StepProviders.ToList(), context.TaskStore, context.IdGenerator, context.ExpressionOptions) + { + m_context = context; + + var userVariables = new List(); + var systemVariables = new VariablesDictionary(); + foreach (var variable in context.Variables) + { + if (context.SystemVariableNames.Contains(variable.Key)) + { + systemVariables[variable.Key] = variable.Value.Clone(); + } + else + { + var userVariable = new Variable + { + Name = variable.Key, + Secret = variable.Value?.IsSecret ?? false, + Value = variable.Value?.Value, + }; + + userVariables.Add(userVariable); + } + } + + // For simplicity the variables are currently marked as read-only for the explicit + // context scenario. + m_userVariables = userVariables.AsReadOnly(); + m_systemVariables = systemVariables.AsReadOnly(); + } + + /// + /// Gets the counter store configured for the builder. 
+ /// + public ICounterStore CounterStore + { + get; + } + + public IEnvironmentStore EnvironmentStore + { + get; + } + + /// + /// Gets or sets the environment version, controlling behaviors related to variable groups and step injection. + /// + public Int32 EnvironmentVersion + { + get; + set; + } + + /// + /// Gets the generator for pipeline identifiers. + /// + public IPipelineIdGenerator IdGenerator + { + get; + } + + /// + /// Gets the package store configured for the builder. + /// + public IPackageStore PackageStore + { + get; + } + + /// + /// Gets the resource store configured for the builder. + /// + public IResourceStore ResourceStore + { + get; + } + + /// + /// Gets the list of step providers configured for the builder. + /// + public IList StepProviders + { + get; + } + + public IList PhaseProviders + { + get; + } + + /// + /// Gets the task store configured for the builder. + /// + public ITaskStore TaskStore + { + get; + } + + /// + /// Gets the expression evaluation options configured for the builder. + /// + public EvaluationOptions ExpressionOptions + { + get; + } + + /// + /// Gets a list of variable sets included in the pipeline. + /// + public IList UserVariables + { + get + { + if (m_userVariables == null) + { + m_userVariables = new List(); + } + return m_userVariables; + } + } + + /// + /// Gets the system variables included in the pipeline. + /// + public IDictionary SystemVariables + { + get + { + if (m_systemVariables == null) + { + m_systemVariables = new VariablesDictionary(); + } + return m_systemVariables; + } + } + + public PipelineBuildContext CreateBuildContext( + BuildOptions options, + IPackageStore packageStore = null, + Boolean includeSecrets = false) + { + PipelineBuildContext context = null; + if (m_context == null) + { + context = new PipelineBuildContext(options, null, this.CounterStore, this.ResourceStore, this.StepProviders, this.TaskStore, packageStore, new InputValidator(), null, this.ExpressionOptions, this.PhaseProviders); + SetVariables(context, includeSecrets: includeSecrets); + context.EnvironmentVersion = this.EnvironmentVersion; + } + else + { + context = new PipelineBuildContext(m_context, options); + } + + return context; + } + + public StageExecutionContext CreateStageExecutionContext( + StageInstance stage, + PipelineState state = PipelineState.InProgress, + DictionaryContextData data = null, + Boolean includeSecrets = false, + IPipelineTraceWriter trace = null, + ExecutionOptions executionOptions = null) + { + if (m_context != null) + { + throw new NotSupportedException(); + } + + var context = new StageExecutionContext(stage, state, data, this.CounterStore, this.PackageStore, this.ResourceStore, this.TaskStore, this.StepProviders, this.IdGenerator, trace, this.ExpressionOptions, executionOptions); + SetVariables(context, stage, includeSecrets: includeSecrets); + context.EnvironmentVersion = this.EnvironmentVersion; + return context; + } + + public PhaseExecutionContext CreatePhaseExecutionContext( + StageInstance stage, + PhaseInstance phase, + PipelineState state = PipelineState.InProgress, + DictionaryContextData data = null, + Boolean includeSecrets = false, + IPipelineTraceWriter trace = null, + ExecutionOptions executionOptions = null) + { + if (m_context != null) + { + throw new NotSupportedException(); + } + + var context = new PhaseExecutionContext(stage, phase, state, data, this.CounterStore, this.PackageStore, this.ResourceStore, this.TaskStore, this.StepProviders, this.IdGenerator, trace, this.ExpressionOptions, 
executionOptions); + SetVariables(context, stage, phase, includeSecrets); + context.EnvironmentVersion = this.EnvironmentVersion; + return context; + } + + private void SetVariables( + IPipelineContext context, + StageInstance stage = null, + PhaseInstance phase = null, + Boolean includeSecrets = false) + { + // Next merge variables specified from alternative sources in the order they are presented. This may + // be specified by build/release definitions or lower constructs, or may be specified by the user as + // input variables to override other values. + var referencedVariableGroups = new Dictionary(StringComparer.OrdinalIgnoreCase); + var expressionsToEvaluate = new Dictionary>(StringComparer.OrdinalIgnoreCase); + if (m_userVariables?.Count > 0 || stage?.Definition?.Variables.Count > 0 || phase?.Definition?.Variables.Count > 0) + { + var userVariables = this.UserVariables.Concat(stage?.Definition?.Variables ?? Enumerable.Empty()).Concat(phase?.Definition?.Variables ?? Enumerable.Empty()); + foreach (var variable in userVariables) + { + if (variable is Variable inlineVariable) + { + if (ExpressionValue.TryParse(inlineVariable.Value, out var expression)) + { + expressionsToEvaluate[inlineVariable.Name] = expression; + } + + if (context.Variables.TryGetValue(inlineVariable.Name, out VariableValue existingValue)) + { + existingValue.Value = inlineVariable.Value; + existingValue.IsSecret |= inlineVariable.Secret; + + // Remove the reference to the variable group + referencedVariableGroups.Remove(inlineVariable.Name); + } + else + { + context.Variables[inlineVariable.Name] = new VariableValue(inlineVariable.Value, inlineVariable.Secret); + } + } + else if (variable is VariableGroupReference groupReference) + { + var variableGroup = this.ResourceStore.VariableGroups.Get(groupReference); + if (variableGroup == null) + { + throw new ResourceNotFoundException(PipelineStrings.VariableGroupNotFound(variableGroup)); + } + + // A pre-computed list of keys wins if it is present, otherwise we compute it dynamically + if (groupReference.SecretStore?.Keys.Count > 0) + { + foreach (var key in groupReference.SecretStore.Keys) + { + // Ignore the key if it isn't present in the variable group + if (!variableGroup.Variables.TryGetValue(key, out var variableGroupEntry)) + { + continue; + } + + // Variable groups which have secrets providers use delayed resolution depending on targets + // being invoked, etc. + if (context.Variables.TryGetValue(key, out VariableValue existingValue)) + { + existingValue.Value = variableGroupEntry.Value; + existingValue.IsSecret |= variableGroupEntry.IsSecret; + referencedVariableGroups[key] = variableGroup; + } + else + { + var clonedValue = variableGroupEntry.Clone(); + clonedValue.Value = variableGroupEntry.Value; + context.Variables[key] = clonedValue; + referencedVariableGroups[key] = variableGroup; + } + } + } + else + { + foreach (var variableGroupEntry in variableGroup.Variables.Where(v => v.Value != null)) + { + // Variable groups which have secrets providers use delayed resolution depending on targets + // being invoked, etc. 
+ if (context.Variables.TryGetValue(variableGroupEntry.Key, out VariableValue existingValue)) + { + existingValue.Value = variableGroupEntry.Value.Value; + existingValue.IsSecret |= variableGroupEntry.Value.IsSecret; + referencedVariableGroups[variableGroupEntry.Key] = variableGroup; + } + else + { + var clonedValue = variableGroupEntry.Value.Clone(); + clonedValue.Value = variableGroupEntry.Value.Value; + context.Variables[variableGroupEntry.Key] = clonedValue; + referencedVariableGroups[variableGroupEntry.Key] = variableGroup; + } + } + } + } + } + } + + // System variables get applied last as they always win + if (m_systemVariables?.Count > 0 || stage != null || phase != null) + { + // Start with system variables specified in the pipeline and then overlay scopes in order + var systemVariables = m_systemVariables == null + ? new VariablesDictionary() + : new VariablesDictionary(m_systemVariables); + + // Setup stage variables + if (stage != null) + { + systemVariables[WellKnownDistributedTaskVariables.StageDisplayName] = stage.Definition?.DisplayName ?? stage.Name; + systemVariables[WellKnownDistributedTaskVariables.StageId] = this.IdGenerator.GetStageInstanceId(stage.Name, stage.Attempt).ToString("D"); + systemVariables[WellKnownDistributedTaskVariables.StageName] = stage.Name; + systemVariables[WellKnownDistributedTaskVariables.StageAttempt] = stage.Attempt.ToString(); + } + + // Setup phase variables + if (phase != null) + { + systemVariables[WellKnownDistributedTaskVariables.PhaseDisplayName] = phase.Definition?.DisplayName ?? phase.Name; + systemVariables[WellKnownDistributedTaskVariables.PhaseId] = this.IdGenerator.GetPhaseInstanceId(stage?.Name, phase.Name, phase.Attempt).ToString("D"); + systemVariables[WellKnownDistributedTaskVariables.PhaseName] = phase.Name; + systemVariables[WellKnownDistributedTaskVariables.PhaseAttempt] = phase.Attempt.ToString(); + } + + foreach (var systemVariable in systemVariables) + { + referencedVariableGroups.Remove(systemVariable.Key); + context.Variables[systemVariable.Key] = systemVariable.Value?.Clone(); + + if (ExpressionValue.TryParse(systemVariable.Value?.Value, out var expression)) + { + expressionsToEvaluate[systemVariable.Key] = expression; + } + + context.SystemVariableNames.Add(systemVariable.Key); + } + } + + if (referencedVariableGroups.Count > 0 || expressionsToEvaluate.Count > 0) + { + context.Trace?.EnterProperty("Variables"); + } + + // Now populate the environment with variable group resources which are needed for execution + if (referencedVariableGroups.Count > 0) + { + foreach (var variableGroupData in referencedVariableGroups.GroupBy(x => x.Value, x => x.Key, s_comparer.Value)) + { + // If our variable group accesses an external service via a service endpoint we need to ensure + // that is also tracked as a required resource to execute this pipeline. + var groupReference = ToGroupReference(variableGroupData.Key, variableGroupData.ToList()); + if (groupReference?.SecretStore != null) + { + // Add the variable group reference to the list of required resources + context.ReferencedResources.VariableGroups.Add(groupReference); + + // Add this resource as authorized for use by this pipeline since the variable group requires + // it to function and the variable group was successfully authorized. 
+ if (groupReference.SecretStore.Endpoint != null) + { + this.ResourceStore.Endpoints.Authorize(groupReference.SecretStore.Endpoint); + context.ReferencedResources.Endpoints.Add(groupReference.SecretStore.Endpoint); + } + + if (groupReference.SecretStore.Keys.Count == 0) + { + continue; + } + + // Make sure we don't unnecessarily retrieve values + var valueProvider = this.ResourceStore.VariableGroups.GetValueProvider(groupReference); + if (valueProvider == null) + { + continue; + } + + var variableGroup = this.ResourceStore.GetVariableGroup(groupReference); + ServiceEndpoint endpoint = null; + if (groupReference.SecretStore.Endpoint != null) + { + endpoint = this.ResourceStore.GetEndpoint(groupReference.SecretStore.Endpoint); + if (endpoint == null) + { + throw new DistributedTaskException(PipelineStrings.ServiceConnectionUsedInVariableGroupNotValid(groupReference.SecretStore.Endpoint, groupReference.Name)); + } + } + + if (!valueProvider.ShouldGetValues(context)) + { + // This will ensure that no value is provided by the server since we expect it to be set by a task + foreach (var key in groupReference.SecretStore.Keys) + { + context.Trace?.Info($"{key}: $[ variablegroups.{variableGroup.Name}.{key} ]"); + expressionsToEvaluate.Remove(key); + context.Variables[key] = new VariableValue(null, true); + } + } + else + { + var values = valueProvider.GetValues(variableGroup, endpoint, groupReference.SecretStore.Keys, includeSecrets); + if (values != null) + { + foreach (var value in values) + { + context.Trace?.Info($"{value.Key}: $[ variablegroups.{variableGroup.Name}.{value.Key} ]"); + expressionsToEvaluate.Remove(value.Key); + + if (includeSecrets || !value.Value.IsSecret) + { + context.Variables[value.Key] = value.Value; + } + else + { + context.Variables[value.Key].Value = null; + } + } + } + } + } + } + } + + // Now resolve the expressions discovered earlier + if (expressionsToEvaluate.Count > 0) + { + foreach (var variableExpression in expressionsToEvaluate) + { + context.Trace?.EnterProperty(variableExpression.Key); + var result = variableExpression.Value.GetValue(context); + context.Trace?.LeaveProperty(variableExpression.Key); + + if (context.Variables.TryGetValue(variableExpression.Key, out VariableValue existingValue)) + { + existingValue.Value = result.Value; + existingValue.IsSecret |= result.ContainsSecrets; + } + else + { + context.Variables[variableExpression.Key] = new VariableValue(result.Value, result.ContainsSecrets); + } + } + } + + // Filter out secret variables if we are not supposed to include them in the context + if (!includeSecrets) + { + foreach (var secretValue in context.Variables.Values.Where(x => x.IsSecret)) + { + secretValue.Value = null; + } + } + + if (referencedVariableGroups.Count > 0 || expressionsToEvaluate.Count > 0) + { + context.Trace?.LeaveProperty("Variables"); + } + } + + private static VariableGroupReference ToGroupReference( + VariableGroup group, + IList keys) + { + if (group == null || keys == null || keys.Count == 0) + { + return null; + } + + var storeConfiguration = ToSecretStoreConfiguration(group, keys); + return new VariableGroupReference + { + Id = group.Id, + Name = group.Name, + GroupType = group.Type, + SecretStore = storeConfiguration, + }; + } + + private static SecretStoreConfiguration ToSecretStoreConfiguration( + VariableGroup group, + IList keys) + { + if (keys.Count == 0) + { + return null; + } + + var keyVaultData = group.ProviderData as AzureKeyVaultVariableGroupProviderData; + var configuration = new SecretStoreConfiguration 
+ { + StoreName = keyVaultData?.Vault ?? group.Name, + }; + + if (keyVaultData != null && keyVaultData.ServiceEndpointId != Guid.Empty) + { + configuration.Endpoint = new ServiceEndpointReference + { + Id = keyVaultData.ServiceEndpointId, + }; + } + + configuration.Keys.AddRange(keys); + return configuration; + } + + private sealed class VariableGroupComparer : IEqualityComparer + { + public Boolean Equals( + VariableGroup x, + VariableGroup y) + { + return x?.Id == y?.Id; + } + + public Int32 GetHashCode(VariableGroup obj) + { + return obj.Id.GetHashCode(); + } + } + + private IList m_userVariables; + private VariablesDictionary m_systemVariables; + private readonly IPipelineContext m_context; + private static readonly Lazy s_comparer = new Lazy(() => new VariableGroupComparer()); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineEnvironment.cs b/src/Sdk/DTPipelines/Pipelines/PipelineEnvironment.cs new file mode 100644 index 00000000000..a774b4c5677 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineEnvironment.cs @@ -0,0 +1,212 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class PipelineEnvironment : IOrchestrationEnvironment + { + public PipelineEnvironment() + { + this.Version = 1; + } + + /// + /// Gets the resources available for use within the environment. + /// + public PipelineResources Resources + { + get + { + if (m_resources == null) + { + m_resources = new PipelineResources(); + } + return m_resources; + } + set + { + m_resources = value; + } + } + + /// + /// Gets the counter values, by prefix, which have been allocated for this environment. + /// + public IDictionary Counters + { + get + { + if (m_counters == null) + { + m_counters = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_counters; + } + } + + /// + /// Gets or sets the data value for any context needed to be passed down to the agent + /// + public IDictionary Data + { + get + { + if (m_data == null) + { + m_data = new Dictionary(); + } + return m_data; + } + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken EnvironmentVariables + { + get; + set; + } + + /// + /// Gets or sets the user variables collection. Variables are applied in order, meaning if variable names + /// appear more than once the last value will be represented in the environment. + /// + public IList UserVariables + { + get + { + if (m_userVariables == null) + { + m_userVariables = new List(); + } + return m_userVariables; + } + } + + /// + /// Gets the system variables collection. System variables are always applied last in order to enforce + /// precedence. + /// + public IDictionary SystemVariables + { + get + { + if (m_systemVariables == null) + { + m_systemVariables = new VariablesDictionary(); + } + return m_systemVariables; + } + } + + /// + /// Gets the explicit variables defined for use within the pipeline. + /// + [Obsolete("This property is obsolete. Use UserVariables and/or SystemVariables instead")] + public IDictionary Variables + { + get + { + if (m_variables == null) + { + m_variables = new VariablesDictionary(); + } + return m_variables; + } + } + + /// + /// Gets the execution options for this pipeline. 
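As the comments above state, UserVariables are applied in declaration order (the last value wins) and SystemVariables are applied afterwards so they always take precedence; PipelineBuilder.Build copies both from the context builder into the generated environment. A sketch of reading the result; the collection element types are assumptions since generics are stripped in this diff:

```csharp
using System;
using GitHub.DistributedTask.Pipelines;

static class EnvironmentInspection
{
    // Sketch only: summarizes a PipelineEnvironment produced by PipelineBuilder.Build.
    static void Summarize(PipelineEnvironment environment)
    {
        Console.WriteLine($"environment version: {environment.Version}");

        // Ordered list; duplicate names resolve to the last value declared.
        Console.WriteLine($"user variables: {environment.UserVariables.Count}");

        // Applied after user variables, so these always win on name collisions.
        Console.WriteLine($"system variables: {environment.SystemVariables.Count}");

        // Counter values allocated for this environment, keyed by prefix.
        foreach (var counter in environment.Counters)
        {
            Console.WriteLine($"counter {counter.Key} = {counter.Value}");
        }
    }
}
```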
+ /// + public ExecutionOptions Options + { + get + { + return m_options; + } + } + + /// + /// Gets the version of the environment. + /// + [DefaultValue(1)] + [DataMember(Name = "Version", EmitDefaultValue = false)] + public Int32 Version + { + get; + set; + } + + OrchestrationProcessType IOrchestrationEnvironment.ProcessType + { + get + { + return m_processType; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_resources?.Count == 0) + { + m_resources = null; + } + + if (m_counters?.Count == 0) + { + m_counters = null; + } + + if (m_data?.Count == 0) + { + m_data = null; + } + + if (m_userVariables?.Count == 0) + { + m_userVariables = null; + } + + if (m_systemVariables?.Count == 0) + { + m_systemVariables = null; + } + + if (m_variables?.Count == 0) + { + m_variables = null; + } + } + + [DataMember(Name = "Counters", EmitDefaultValue = false)] + private Dictionary m_counters; + + [DataMember(Name = "Data", EmitDefaultValue = false)] + private Dictionary m_data; + + [DataMember(Name = "Options")] + private ExecutionOptions m_options = new ExecutionOptions(); + + [DataMember(Name = "ProcessType")] + private OrchestrationProcessType m_processType = OrchestrationProcessType.Pipeline; + + [DataMember(Name = "Resources", EmitDefaultValue = false)] + private PipelineResources m_resources; + + [DataMember(Name = "SystemVariables", EmitDefaultValue = false)] + private VariablesDictionary m_systemVariables; + + [DataMember(Name = "UserVariables", EmitDefaultValue = false)] + private IList m_userVariables; + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private VariablesDictionary m_variables; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineException.cs b/src/Sdk/DTPipelines/Pipelines/PipelineException.cs new file mode 100644 index 00000000000..1e54cdb84a9 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineException.cs @@ -0,0 +1,297 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class PipelineException : VssServiceException + { + public PipelineException(String message) + : base(message) + { + } + + public PipelineException( + String message, + Exception innerException) + : base(message, innerException) + { + } + + /// + /// Initializes an exception from serialized data + /// + /// object holding the serialized data + /// context info about the source or destination + protected PipelineException( + SerializationInfo info, + StreamingContext context) + : base(info, context) + { + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class AmbiguousResourceSpecificationException : PipelineException + { + public AmbiguousResourceSpecificationException(String message) + : base(message) + { + } + + public AmbiguousResourceSpecificationException( + String message, + Exception innerException) + : base(message, innerException) + { + } + + protected AmbiguousResourceSpecificationException( + SerializationInfo info, + StreamingContext context) : base(info, context) + { + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class AmbiguousTaskSpecificationException : PipelineException + { + public AmbiguousTaskSpecificationException(String message) + : base(message) + { + } + + public AmbiguousTaskSpecificationException( + String message, + Exception innerException) + : 
base(message, innerException) + { + } + + protected AmbiguousTaskSpecificationException( + SerializationInfo info, + StreamingContext context) : base(info, context) + { + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class InvalidPipelineOperationException : PipelineException + { + public InvalidPipelineOperationException(String message) : base(message) + { + } + + public InvalidPipelineOperationException( + String message, + Exception innerException) : base(message, innerException) + { + } + + protected InvalidPipelineOperationException( + SerializationInfo info, + StreamingContext context) : base(info, context) + { + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class ResourceNotFoundException : PipelineException + { + public ResourceNotFoundException(String message) + : base(message) + { + } + + public ResourceNotFoundException( + String message, + Exception innerException) + : base(message, innerException) + { + } + + protected ResourceNotFoundException( + SerializationInfo info, + StreamingContext context) : base(info, context) + { + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class ResourceNotAuthorizedException : PipelineException + { + public ResourceNotAuthorizedException(String message) + : base(message) + { + } + + public ResourceNotAuthorizedException( + String message, + Exception innerException) + : base(message, innerException) + { + } + + protected ResourceNotAuthorizedException( + SerializationInfo info, + StreamingContext context) : base(info, context) + { + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class ResourceValidationException : PipelineException + { + public ResourceValidationException(String message) + : base(message) + { + } + + public ResourceValidationException( + String message, + String propertyName) + : base(message) + { + this.PropertyName = propertyName; + } + + public ResourceValidationException( + String message, + String propertyName, + Exception innerException) + : base(message, innerException) + { + this.PropertyName = propertyName; + } + + public ResourceValidationException( + String message, + Exception innerException) + : base(message, innerException) + { + } + + protected ResourceValidationException( + SerializationInfo info, + StreamingContext context) + : base(info, context) + { + } + + /// + /// Gets the property name of the resource which caused the error. + /// + public String PropertyName + { + get; + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class StageNotFoundException : PipelineException + { + public StageNotFoundException(String message) + : base(message) + { + } + + public StageNotFoundException( + String message, + Exception innerException) + : base(message, innerException) + { + } + + protected StageNotFoundException( + SerializationInfo info, + StreamingContext context) + : base(info, context) + { + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class PipelineValidationException : PipelineException + { + public PipelineValidationException() + : this(PipelineStrings.PipelineNotValid()) + { + } + + // Report first 2 error messages, due to space limit on printing this error message in UI + public PipelineValidationException(IEnumerable errors) + : this(PipelineStrings.PipelineNotValidWithErrors(string.Join(",", (errors ?? Enumerable.Empty()).Take(2).Select(e => e.Message)))) + { + m_errors = new List(errors ?? 
Enumerable.Empty()); + } + + public PipelineValidationException(String message) + : base(message) + { + } + + public PipelineValidationException( + String message, + Exception innerException) + : base(message, innerException) + { + } + + public IList Errors + { + get + { + if (m_errors == null) + { + m_errors = new List(); + } + return m_errors; + } + } + + /// + /// Initializes an exception from serialized data + /// + /// object holding the serialized data + /// context info about the source or destination + protected PipelineValidationException( + SerializationInfo info, + StreamingContext context) + : base(info, context) + { + } + + private List m_errors; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public class MaxJobExpansionException : PipelineValidationException + { + public MaxJobExpansionException(IEnumerable errors) + : base(errors) + { + } + + public MaxJobExpansionException(String message) + : base(message) + { + } + + public MaxJobExpansionException( + String message, + Exception innerException) + : base(message, innerException) + { + } + protected MaxJobExpansionException( + SerializationInfo info, + StreamingContext context) + : base(info, context) + { + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineIdGenerator.cs b/src/Sdk/DTPipelines/Pipelines/PipelineIdGenerator.cs new file mode 100644 index 00000000000..79d63183cea --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineIdGenerator.cs @@ -0,0 +1,114 @@ +using System; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class PipelineIdGenerator : IPipelineIdGenerator + { + public PipelineIdGenerator(Boolean preserveCase = false) + { + m_preserveCase = preserveCase; + } + + public Guid GetInstanceId(params String[] segments) + { + return PipelineUtilities.GetInstanceId(GetInstanceName(segments), m_preserveCase); + } + + public String GetInstanceName(params String[] segments) + { + return PipelineUtilities.GetInstanceName(segments); + } + + public String GetStageIdentifier(String stageName) + { + return PipelineUtilities.GetStageIdentifier(stageName); + } + + public Guid GetStageInstanceId( + String stageName, + Int32 attempt) + { + return PipelineUtilities.GetStageInstanceId(stageName, attempt, m_preserveCase); + } + + public String GetStageInstanceName( + String stageName, + Int32 attempt) + { + return PipelineUtilities.GetStageInstanceName(stageName, attempt); + } + + public String GetPhaseIdentifier( + String stageName, + String phaseName) + { + return PipelineUtilities.GetPhaseIdentifier(stageName, phaseName); + } + + public Guid GetPhaseInstanceId( + String stageName, + String phaseName, + Int32 attempt) + { + return PipelineUtilities.GetPhaseInstanceId(stageName, phaseName, attempt, m_preserveCase); + } + + public String GetPhaseInstanceName( + String stageName, + String phaseName, + Int32 attempt) + { + return PipelineUtilities.GetPhaseInstanceName(stageName, phaseName, attempt); + } + + public String GetJobIdentifier( + String stageName, + String phaseName, + String jobName) + { + return PipelineUtilities.GetJobIdentifier(stageName, phaseName, jobName); + } + + public Guid GetJobInstanceId( + String stageName, + String phaseName, + String jobName, + Int32 attempt) + { + return PipelineUtilities.GetJobInstanceId(stageName, phaseName, jobName, attempt, m_preserveCase); + } + + public String GetJobInstanceName( + String stageName, + String phaseName, + String jobName, + Int32 attempt) + { + return 
PipelineUtilities.GetJobInstanceName(stageName, phaseName, jobName, attempt); + } + + public String GetTaskInstanceName( + String stageName, + String phaseName, + String jobName, + Int32 jobAttempt, + String taskName) + { + return PipelineUtilities.GetTaskInstanceName(stageName, phaseName, jobName, jobAttempt, taskName); + } + + public Guid GetTaskInstanceId( + String stageName, + String phaseName, + String jobName, + Int32 jobAttempt, + String taskName) + { + return PipelineUtilities.GetTaskInstanceId(stageName, phaseName, jobName, jobAttempt, taskName, m_preserveCase); + } + + private Boolean m_preserveCase; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineProcess.cs b/src/Sdk/DTPipelines/Pipelines/PipelineProcess.cs new file mode 100644 index 00000000000..d423203f998 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineProcess.cs @@ -0,0 +1,115 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class PipelineProcess : IOrchestrationProcess + { + [JsonConstructor] + public PipelineProcess() + { + } + + public PipelineProcess(IList phases) + { + var stage = CreateDefaultStage(); + stage.Phases.AddRange(phases ?? Enumerable.Empty()); + this.Stages.Add(stage); + } + + public PipelineProcess(IList stages) + { + if (stages?.Count > 0) + { + m_stages = new List(stages); + } + } + + public IList Stages + { + get + { + if (m_stages == null) + { + m_stages = new List(); + } + return m_stages; + } + } + + OrchestrationProcessType IOrchestrationProcess.ProcessType + { + get + { + return OrchestrationProcessType.Pipeline; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_stages?.Count == 1 && String.Equals(m_stages[0].Name, PipelineConstants.DefaultJobName, StringComparison.OrdinalIgnoreCase)) + { + m_phases = new List(m_stages[0].Phases); + m_stages = null; + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + if (m_phases?.Count > 0) + { + var stage = CreateDefaultStage(); + stage.Phases.AddRange(m_phases); + + m_phases = null; + this.Stages.Add(stage); + } + } + + private static Stage CreateDefaultStage() + { + return new Stage { Name = PipelineConstants.DefaultJobName }; + } + + /// + /// return the node at the given path + /// + public IGraphNode GetNodeAtPath(IList path) + { + var length = path?.Count(); + var node = default(IGraphNode); + if (length > 0) + { + // find stage + node = this.Stages.FirstOrDefault(x => string.Equals(x.Name, path[0], StringComparison.OrdinalIgnoreCase) + || string.Equals(x.Name, PipelineConstants.DefaultJobName, StringComparison.OrdinalIgnoreCase)); + if (length > 1 && node != null) + { + // find phase + node = (node as Stage).Phases.FirstOrDefault(x => string.Equals(x.Name, path[1], StringComparison.OrdinalIgnoreCase) + || string.Equals(x.Name, PipelineConstants.DefaultJobName, StringComparison.OrdinalIgnoreCase)); + + // NOTE: jobs / phase configurations are not IGraphNodes yet + } + } + + return node; + } + + [DataMember(Name = "Stages", EmitDefaultValue = false)] + private List m_stages; + + [DataMember(Name = "Phases", EmitDefaultValue = false)] + private List m_phases; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineResource.cs 
b/src/Sdk/DTPipelines/Pipelines/PipelineResource.cs new file mode 100644 index 00000000000..cb3d343247f --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineResource.cs @@ -0,0 +1,57 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class PipelinePropertyNames + { + public static readonly String Artifacts = "artifacts"; + public static readonly String Branch = "branch"; + public static readonly String DefinitionId = "definitionId"; + public static readonly String PipelineId = "pipelineId"; + public static readonly String Project = "project"; + public static readonly String ProjectId = "projectId"; + public static readonly String Source = "source"; + public static readonly String Tags = "tags"; + public static readonly String Version = "version"; + } + + /// + /// Provides a data contract for a pipeline resource referenced by a pipeline. + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class PipelineResource : Resource + { + public PipelineResource() + { + } + + protected PipelineResource(PipelineResource resourceToCopy) + : base(resourceToCopy) + { + } + + /// + /// Gets or sets the version of the build resource. + /// + public String Version + { + get + { + return this.Properties.Get(PipelinePropertyNames.Version); + } + set + { + this.Properties.Set(PipelinePropertyNames.Version, value); + } + } + + public PipelineResource Clone() + { + return new PipelineResource(this); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineResources.cs b/src/Sdk/DTPipelines/Pipelines/PipelineResources.cs new file mode 100644 index 00000000000..fa0d6f7e5ce --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineResources.cs @@ -0,0 +1,570 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides collections of securable resources available for use within a pipeline. + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class PipelineResources + { + /// + /// Initializes a new PipelineResources instance with empty resource collections. 
+ /// + public PipelineResources() + { + } + + private PipelineResources(PipelineResources resourcesToCopy) + { + if (resourcesToCopy.m_builds?.Count > 0) + { + m_builds = new HashSet(resourcesToCopy.m_builds.Select(x => x.Clone()), new ResourceComparer()); + } + + if (resourcesToCopy.m_containers?.Count > 0) + { + m_containers = new HashSet(resourcesToCopy.m_containers.Select(x => x.Clone()), new ResourceComparer()); + } + + if (resourcesToCopy.m_endpoints?.Count > 0) + { + m_endpoints = new HashSet(resourcesToCopy.m_endpoints.Select(x => x.Clone()), new EndpointComparer()); + } + + if (resourcesToCopy.m_environments?.Count > 0) + { + m_environments = new HashSet(resourcesToCopy.m_environments.Select(x => x.Clone()), new EnvironmentComparer()); + } + + if (resourcesToCopy.m_files?.Count > 0) + { + m_files = new HashSet(resourcesToCopy.m_files.Select(x => x.Clone()), new FileComparer()); + } + + if (resourcesToCopy.m_pipelines?.Count > 0) + { + m_pipelines = new HashSet(resourcesToCopy.m_pipelines.Select(x => x.Clone()), new ResourceComparer()); + } + + if (resourcesToCopy.m_queues?.Count > 0) + { + m_queues = new HashSet(resourcesToCopy.m_queues.Select(x => x.Clone()), new QueueComparer()); + } + + if (resourcesToCopy.m_pools?.Count > 0) + { + m_pools = new HashSet(resourcesToCopy.m_pools.Select(x => x.Clone()), new PoolComparer()); + } + + if (resourcesToCopy.m_repositories?.Count > 0) + { + m_repositories = new HashSet(resourcesToCopy.m_repositories.Select(x => x.Clone()), new ResourceComparer()); + } + + if (resourcesToCopy.m_variableGroups?.Count > 0) + { + m_variableGroups = new HashSet(resourcesToCopy.m_variableGroups.Select(x => x.Clone()), new VariableGroupComparer()); + } + } + + /// + /// Gets the total count of resources. + /// + public Int32 Count => (m_builds?.Count ?? 0) + + (m_containers?.Count ?? 0) + + (m_endpoints?.Count ?? 0) + + (m_environments?.Count ?? 0) + + (m_files?.Count ?? 0) + + (m_pipelines?.Count ?? 0) + + (m_queues?.Count ?? 0) + + (m_pools?.Count ?? 0) + + (m_repositories?.Count ?? 0) + + (m_variableGroups?.Count ?? 0); + + /// + /// List of all resources that need to be sent to PolicyService + /// + public IEnumerable GetSecurableResources() + { + foreach (var resourceCollection in new IEnumerable[] { + m_endpoints, + m_environments, + m_files, + m_queues, + m_pools, + m_variableGroups + }) + { + if (resourceCollection != null) + { + foreach (var r in resourceCollection) + { + if (r != null) + { + yield return r; + } + } + } + } + } + + /// + /// Gets the collection of build resources defined in the pipeline. + /// + public ISet Builds + { + get + { + if (m_builds == null) + { + m_builds = new HashSet(new ResourceComparer()); + } + return m_builds; + } + } + + /// + /// Gets the collection of container resources defined in the pipeline. + /// + public ISet Containers + { + get + { + if (m_containers == null) + { + m_containers = new HashSet(new ResourceComparer()); + } + return m_containers; + } + } + + /// + /// Gets the collection of endpoint references available in the resources of a pipeline. + /// + public ISet Endpoints + { + get + { + if (m_endpoints == null) + { + m_endpoints = new HashSet(new EndpointComparer()); + } + return m_endpoints; + } + } + + /// + /// Gets the collection of environments listed with deployment job in pipeline. 
+ /// + public ISet Environments + { + get + { + if (m_environments == null) + { + m_environments = new HashSet(new EnvironmentComparer()); + } + return m_environments; + } + } + + /// + /// Gets the collection of secure file references available in the resources of a pipeline. + /// + public ISet Files + { + get + { + if (m_files == null) + { + m_files = new HashSet(new FileComparer()); + } + return m_files; + } + } + + /// + /// Gets the collection of pipeline resources defined in the pipeline. + /// + public ISet Pipelines + { + get + { + if (m_pipelines == null) + { + m_pipelines = new HashSet(new ResourceComparer()); + } + return m_pipelines; + } + } + + /// + /// Gets the collection of agent queue references available in the resources of a pipeline. + /// + public ISet Queues + { + get + { + if (m_queues == null) + { + m_queues = new HashSet(new QueueComparer()); + } + return m_queues; + } + } + + /// + /// Gets the collection of agent pool references available in the resources of a pipeline. + /// + public ISet Pools + { + get + { + if (m_pools == null) + { + m_pools = new HashSet(new PoolComparer()); + } + return m_pools; + } + } + + /// + /// Gets the collection of repository resources defined in the pipeline. + /// + public ISet Repositories + { + get + { + if (m_repositories == null) + { + m_repositories = new HashSet(new ResourceComparer()); + } + return m_repositories; + } + } + + /// + /// Gets the collection of variable group references available in the resources of a pipeline. + /// + public ISet VariableGroups + { + get + { + if (m_variableGroups == null) + { + m_variableGroups = new HashSet(new VariableGroupComparer()); + } + return m_variableGroups; + } + } + + public PipelineResources Clone() + { + return new PipelineResources(this); + } + + public void MergeWith(PipelineResources resources) + { + if (resources != null) + { + this.Builds.UnionWith(resources.Builds); + this.Containers.UnionWith(resources.Containers); + this.Endpoints.UnionWith(resources.Endpoints); + this.Environments.UnionWith(resources.Environments); + this.Files.UnionWith(resources.Files); + this.Pipelines.UnionWith(resources.Pipelines); + this.Queues.UnionWith(resources.Queues); + this.Pools.UnionWith(resources.Pools); + this.Repositories.UnionWith(resources.Repositories); + this.VariableGroups.UnionWith(resources.VariableGroups); + } + } + + internal void AddEndpointReference(String endpointId) + { + if (Guid.TryParse(endpointId, out Guid endpointIdValue)) + { + this.Endpoints.Add(new ServiceEndpointReference { Id = endpointIdValue }); + } + else + { + this.Endpoints.Add(new ServiceEndpointReference { Name = endpointId }); + } + } + + internal void AddEndpointReference(ServiceEndpointReference reference) + { + this.Endpoints.Add(reference); + } + + internal void AddSecureFileReference(String fileId) + { + if (Guid.TryParse(fileId, out Guid fileIdValue)) + { + this.Files.Add(new SecureFileReference { Id = fileIdValue }); + } + else + { + this.Files.Add(new SecureFileReference { Name = fileId }); + } + } + + internal void AddSecureFileReference(SecureFileReference reference) + { + this.Files.Add(reference); + } + + internal void AddAgentQueueReference(AgentQueueReference reference) + { + this.Queues.Add(reference); + } + + internal void AddAgentPoolReference(AgentPoolReference reference) + { + this.Pools.Add(reference); + } + + internal void AddVariableGroupReference(VariableGroupReference reference) + { + this.VariableGroups.Add(reference); + } + + internal void 
AddEnvironmentReference(EnvironmentReference reference) + { + this.Environments.Add(reference); + } + + internal void Clear() + { + m_builds?.Clear(); + m_containers?.Clear(); + m_endpoints?.Clear(); + m_files?.Clear(); + m_pipelines?.Clear(); + m_queues?.Clear(); + m_pools?.Clear(); + m_repositories?.Clear(); + m_variableGroups?.Clear(); + m_environments?.Clear(); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_builds?.Count == 0) + { + m_builds = null; + } + + if (m_containers?.Count == 0) + { + m_containers = null; + } + + if (m_endpoints?.Count == 0) + { + m_endpoints = null; + } + + if (m_files?.Count == 0) + { + m_files = null; + } + + if (m_pipelines?.Count == 0) + { + m_pipelines = null; + } + + if (m_queues?.Count == 0) + { + m_queues = null; + } + + if (m_pools?.Count == 0) + { + m_pools = null; + } + + if (m_repositories?.Count == 0) + { + m_repositories = null; + } + + if (m_variableGroups?.Count == 0) + { + m_variableGroups = null; + } + + if (m_environments?.Count == 0) + { + m_environments = null; + } + } + + [DataMember(Name = "Builds", EmitDefaultValue = false)] + private HashSet m_builds; + + [DataMember(Name = "Containers", EmitDefaultValue = false)] + private HashSet m_containers; + + [DataMember(Name = "Endpoints", EmitDefaultValue = false)] + private HashSet m_endpoints; + + [DataMember(Name = "Files", EmitDefaultValue = false)] + private HashSet m_files; + + [DataMember(Name = "Pipelines", EmitDefaultValue = false)] + private HashSet m_pipelines; + + [DataMember(Name = "Queues", EmitDefaultValue = false)] + private HashSet m_queues; + + [DataMember(Name = "Pools", EmitDefaultValue = false)] + private HashSet m_pools; + + [DataMember(Name = "Repositories", EmitDefaultValue = false)] + private HashSet m_repositories; + + [DataMember(Name = "VariableGroups", EmitDefaultValue = false)] + private HashSet m_variableGroups; + + [DataMember(Name = "Environments", EmitDefaultValue = false)] + private HashSet m_environments; + + internal abstract class ResourceReferenceComparer : IEqualityComparer where TResource : ResourceReference + { + protected ResourceReferenceComparer(IEqualityComparer idComparer) + { + m_idComparer = idComparer; + } + + public abstract TId GetId(TResource resource); + + public Boolean Equals( + TResource left, + TResource right) + { + if (left == null && right == null) + { + return true; + } + + if ((left != null && right == null) || (left == null && right != null)) + { + return false; + } + + var leftId = GetId(left); + var rightId = GetId(right); + if (m_idComparer.Equals(leftId, default(TId)) && m_idComparer.Equals(rightId, default(TId))) + { + return StringComparer.OrdinalIgnoreCase.Equals(left.Name, right.Name); + } + else + { + return m_idComparer.Equals(leftId, rightId); + } + } + + public Int32 GetHashCode(TResource obj) + { + var identifier = GetId(obj); + if (!m_idComparer.Equals(identifier, default(TId))) + { + return identifier.GetHashCode(); + } + else + { + return StringComparer.OrdinalIgnoreCase.GetHashCode(obj.Name); + } + } + + private readonly IEqualityComparer m_idComparer; + } + + internal class EndpointComparer : ResourceReferenceComparer + { + public EndpointComparer() + : base(EqualityComparer.Default) + { + } + + public override Guid GetId(ServiceEndpointReference resource) + { + return resource.Id; + } + } + + private class FileComparer : ResourceReferenceComparer + { + public FileComparer() + : base(EqualityComparer.Default) + { + } + + public override Guid GetId(SecureFileReference 
resource) + { + return resource.Id; + } + } + + private class QueueComparer : ResourceReferenceComparer + { + public QueueComparer() + : base(EqualityComparer.Default) + { + } + + public override Int32 GetId(AgentQueueReference resource) + { + return resource.Id; + } + } + + private class PoolComparer : ResourceReferenceComparer + { + public PoolComparer() + : base(EqualityComparer.Default) + { + } + + public override Int32 GetId(AgentPoolReference resource) + { + return resource.Id; + } + } + + private class VariableGroupComparer : ResourceReferenceComparer + { + public VariableGroupComparer() + : base(EqualityComparer.Default) + { + } + + public override Int32 GetId(VariableGroupReference resource) + { + return resource.Id; + } + } + + private class EnvironmentComparer : ResourceReferenceComparer + { + public EnvironmentComparer() + : base(EqualityComparer.Default) + { + } + + public override Int32 GetId(EnvironmentReference resource) + { + return resource.Id; + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineState.cs b/src/Sdk/DTPipelines/Pipelines/PipelineState.cs new file mode 100644 index 00000000000..f280ef33785 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineState.cs @@ -0,0 +1,22 @@ +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public enum PipelineState + { + [EnumMember] + NotStarted, + + [EnumMember] + InProgress, + + [EnumMember] + Canceling, + + [EnumMember] + Completed, + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineStepsTemplate.cs b/src/Sdk/DTPipelines/Pipelines/PipelineStepsTemplate.cs new file mode 100644 index 00000000000..04ce0759e7d --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineStepsTemplate.cs @@ -0,0 +1,55 @@ +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class PipelineStepsTemplate + { + public IList Steps + { + get + { + if (m_steps == null) + { + m_steps = new List(); + } + return m_steps; + } + } + + public IList Errors + { + get + { + if (m_errors == null) + { + m_errors = new List(); + } + return m_errors; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_steps?.Count == 0) + { + m_steps = null; + } + + if (m_errors?.Count == 0) + { + m_errors = null; + } + } + + [DataMember(Name = "Steps", EmitDefaultValue = false)] + private List m_steps; + + [DataMember(Name = "Errors", EmitDefaultValue = false)] + private List m_errors; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineTemplate.cs b/src/Sdk/DTPipelines/Pipelines/PipelineTemplate.cs new file mode 100644 index 00000000000..b2ca723493d --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineTemplate.cs @@ -0,0 +1,147 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.ObjectTemplating.Tokens; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class PipelineTemplate + { + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public PipelineResources Resources + { + get + { + if (m_resources == null) + { + m_resources = new PipelineResources(); + } + return 
m_resources; + } + } + + [DataMember(EmitDefaultValue = false)] + public TemplateToken EnvironmentVariables + { + get; + set; + } + + public IList Variables + { + get + { + if (m_variables == null) + { + m_variables = new List(); + } + return m_variables; + } + } + + public IList Stages + { + get + { + if (m_stages == null) + { + m_stages = new List(); + } + return m_stages; + } + } + + public IList Triggers + { + get + { + if (m_triggers == null) + { + m_triggers = new List(); + } + return m_triggers; + } + } + + public IList Errors + { + get + { + if (m_errors == null) + { + m_errors = new List(); + } + return m_errors; + } + } + + [DataMember(EmitDefaultValue = false)] + public String InitializationLog + { + get; + set; + } + + public void CheckErrors() + { + if (m_errors?.Count > 0) + { + throw new PipelineValidationException(m_errors); + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_stages?.Count == 0) + { + m_stages = null; + } + + if (m_errors?.Count == 0) + { + m_errors = null; + } + + if (m_triggers?.Count == 0) + { + m_triggers = null; + } + + if (m_resources?.Count == 0) + { + m_resources = null; + } + + if (m_variables?.Count == 0) + { + m_variables = null; + } + } + + [DataMember(Name = "Stages", EmitDefaultValue = false)] + private List m_stages; + + [DataMember(Name = "Errors", EmitDefaultValue = false)] + private List m_errors; + + [DataMember(Name = "Triggers", EmitDefaultValue = false)] + private List m_triggers; + + [DataMember(Name = "Resources", EmitDefaultValue = false)] + private PipelineResources m_resources; + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private List m_variables; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineTrigger.cs b/src/Sdk/DTPipelines/Pipelines/PipelineTrigger.cs new file mode 100644 index 00000000000..a4f900ae55b --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineTrigger.cs @@ -0,0 +1,22 @@ +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class PipelineTrigger + { + public PipelineTrigger(PipelineTriggerType triggerType) + { + TriggerType = triggerType; + } + + /// + /// The type of the trigger. + /// + public PipelineTriggerType TriggerType + { + get; + private set; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineTriggerType.cs b/src/Sdk/DTPipelines/Pipelines/PipelineTriggerType.cs new file mode 100644 index 00000000000..4b88d938504 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineTriggerType.cs @@ -0,0 +1,18 @@ +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public enum PipelineTriggerType + { + /// + /// A pipeline should be started for each changeset. + /// + ContinuousIntegration = 2, + + /// + /// A pipeline should be triggered when a GitHub pull request is created or updated. 
+ /// + PullRequest = 64, + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineUtilities.cs b/src/Sdk/DTPipelines/Pipelines/PipelineUtilities.cs new file mode 100644 index 00000000000..ff9a7db7e25 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineUtilities.cs @@ -0,0 +1,407 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Text; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class PipelineUtilities + { + public static Guid GetInstanceId( + String identifier, + Boolean preserveCase = false) + { + if (preserveCase) + { + return TimelineRecordIdGenerator.GetId(identifier); + } + else + { + return TimelineRecordIdGenerator.GetId(identifier?.ToLowerInvariant()); + } + } + + /// <summary> + /// This is the format for producing "instance names". + /// Instance names are defined to be node "identifiers" (the logical path to the node in the graph) + /// plus a single attempt number suffixed at the end. + /// Identifiers are constant across different attempts of the same node. + /// Instance names are not, but will differ only in attempt number. + /// </summary> + public static String GetInstanceName(params String[] segments) + { + return String.Join(".", segments.Where(x => !String.IsNullOrEmpty(x)).Select(x => x.Trim('.'))); + } + + public static String GetName(String identifier) + { + ArgumentUtility.CheckStringForNullOrEmpty(identifier, nameof(identifier)); + + var separatorIndex = identifier.LastIndexOf('.'); + return separatorIndex >= 0 ? identifier.Substring(separatorIndex + 1) : identifier; + } + + public static Guid GetStageInstanceId( + StageInstance stage, + Boolean preserveCase = false) + { + return GetStageInstanceId(stage.Name, stage.Attempt, preserveCase); + } + + public static String GetStageIdentifier(StageInstance stage) + { + return GetStageIdentifier(stage.Name); + } + + public static String GetStageIdentifier(String stageName) + { + return GetStageInstanceName(stageName, 1); + } + + public static String GetStageInstanceName(StageInstance stage) + { + return GetStageInstanceName(stage.Name, stage.Attempt); + } + + public static Guid GetStageInstanceId( + String stageName, + Int32 stageAttempt, + Boolean preserveCase = false) + { + return GetInstanceId(GetStageInstanceName(stageName, stageAttempt, true), preserveCase); + } + + public static String GetStageInstanceName( + String stageName, + Int32 stageAttempt) + { + return GetStageInstanceName(stageName, stageAttempt, true); + } + + public static String GetStageInstanceName( + String stageName, + Int32 stageAttempt, + Boolean includeDefault) + { + if (!String.IsNullOrEmpty(stageName) && + (includeDefault || !stageName.Equals(PipelineConstants.DefaultJobName, StringComparison.OrdinalIgnoreCase))) + { + var instanceName = stageName; + if (stageAttempt > 1) + { + instanceName = $"{stageName}.{stageAttempt}"; + } + + return instanceName; + } + + return String.Empty; + } + + public static String GetPhaseIdentifier( + StageInstance stage, + PhaseInstance phase) + { + return GetPhaseIdentifier(stage?.Name, phase.Name); + } + + public static String GetPhaseIdentifier( + String stageName, + String phaseName) + { + return GetPhaseInstanceName(stageName, phaseName, 1); + } + + public static Guid GetPhaseInstanceId( + StageInstance stage, + PhaseInstance phase, + Boolean
preserveCase = false) + { + return GetPhaseInstanceId(stage?.Name, phase.Name, phase.Attempt, preserveCase); + } + + public static Guid GetPhaseInstanceId( + String stageName, + String phaseName, + Int32 phaseAttempt, + Boolean preserveCase = false) + { + return GetInstanceId(GetPhaseInstanceName(stageName, phaseName, phaseAttempt), preserveCase); + } + + /// + /// The phase "instance name" is the phase identifier suffixed with the phase attempt. + /// + public static String GetPhaseInstanceName( + StageInstance stage, + PhaseInstance phase) + { + var sb = new StringBuilder(GetStageInstanceName(stage?.Name, stageAttempt: 1, false)); + if (sb.Length > 0) + { + sb.Append("."); + } + + sb.Append($"{phase.Name}"); + if (phase.Attempt > 1) + { + sb.Append($".{phase.Attempt}"); + } + + return sb.ToString(); + } + + public static String GetPhaseInstanceName( + String stageName, + String phaseName, + Int32 phaseAttempt) + { + var sb = new StringBuilder(GetStageInstanceName(stageName, stageAttempt: 1, false)); + if (sb.Length > 0) + { + sb.Append("."); + } + + sb.Append($"{phaseName}"); + if (phaseAttempt > 1) + { + sb.Append($".{phaseAttempt}"); + } + + return sb.ToString(); + } + + public static String GetJobIdentifier( + StageInstance stage, + PhaseInstance phase, + JobInstance job) + { + return GetJobIdentifier(stage?.Name, phase.Name, job.Name); + } + + public static String GetJobIdentifier( + String stageName, + String phaseName, + String jobName) + { + return GetJobInstanceName(stageName, phaseName, jobName, 1); + } + + public static Guid GetJobInstanceId( + StageInstance stage, + PhaseInstance phase, + JobInstance job, + Boolean preserveCase = false) + { + return GetJobInstanceId(stage?.Name, phase.Name, job.Name, job.Attempt, preserveCase); + } + + public static Guid GetJobInstanceId( + String stageName, + String phaseName, + String jobName, + Int32 jobAttempt, + Boolean preserveCase = false) + { + return GetInstanceId(GetJobInstanceName(stageName, phaseName, jobName, jobAttempt), preserveCase); + } + + public static String GetJobInstanceName( + StageInstance stage, + PhaseInstance phase, + JobInstance job) + { + return GetJobInstanceName(stage?.Name, phase.Name, job.Name, job.Attempt); + } + + public static String GetJobInstanceName( + String jobIdentifier, + Int32 jobAttempt) + { + var sb = new StringBuilder(jobIdentifier); + if (jobAttempt > 1) + { + sb.Append($".{jobAttempt}"); + } + + return sb.ToString(); + } + + public static String GetJobInstanceName(TimelineRecord job) + { + if (job.Attempt <= 1) + { + return job.Identifier; + } + else + { + return $"{job.Identifier}.{job.Attempt}"; + } + } + + public static String GetJobInstanceName( + String stageName, + String phaseName, + String jobName, + Int32 jobAttempt) + { + var sb = new StringBuilder(GetPhaseInstanceName(stageName, phaseName, 1)); + sb.Append($".{jobName}"); + if (jobAttempt > 1) + { + sb.Append($".{jobAttempt}"); + } + + return sb.ToString(); + } + + public static Guid GetTaskInstanceId( + String stageName, + String phaseName, + String jobName, + Int32 jobAttempt, + String taskName, + Boolean preserveCase = false) + { + return GetInstanceId(GetTaskInstanceName(stageName, phaseName, jobName, jobAttempt, taskName), preserveCase); + } + + public static String GetTaskInstanceName( + String stageName, + String phaseName, + String jobName, + Int32 jobAttempt, + String taskName) + { + return $"{GetJobInstanceName(stageName, phaseName, jobName, jobAttempt)}.{taskName}"; + } + + public static String GetTaskInstanceName( + 
TimelineRecord jobRecord, + TimelineRecord taskRecord) + { + return $"{GetJobInstanceName(jobRecord)}.{taskRecord.RefName}"; + } + + public static TaskResult MergeResult( + TaskResult result, + TaskResult childResult) + { + // If the final status is already canceled or failed then we can't get any worse + if (result == TaskResult.Canceled || result == TaskResult.Failed) + { + return result; + } + + switch (childResult) + { + case TaskResult.Canceled: + result = TaskResult.Canceled; + break; + + case TaskResult.Failed: + case TaskResult.Abandoned: + result = TaskResult.Failed; + break; + + case TaskResult.SucceededWithIssues: + if (result == TaskResult.Succeeded) + { + result = TaskResult.SucceededWithIssues; + } + break; + } + + return result; + } + + public static TaskResult AggregateResult(IEnumerable<TaskResult> results) + { + var result = TaskResult.Succeeded; + if (results == default) + { + return result; + } + foreach (var r in results) + { + result = MergeResult(result, r); + } + return result; + } + + /// <summary> + /// Returns the node path from the pipeline root to the instance node. + /// </summary> + public static IList<String> GetPathComponents(String instanceName) + { + var result = new List<String>(); + if (!String.IsNullOrEmpty(instanceName)) + { + var tokens = instanceName.Split('.'); + var i = 0; + if (Guid.TryParse(tokens[i], out var _)) + { + // first parameter might be a guid + i = 1; + } + + // ignore attempt numbers -- these are not meaningful as path components + for (var end = tokens.Length; i < end; ++i) + { + var t = tokens[i]; + + // node names may only contain numbers, letters, and '_' + // node names must begin with a letter. + result.AddIf(!Int32.TryParse(t, out var _), t); + } + } + + return result; + } + + /// <summary> + /// A legal node name starts with a letter or '_', and consists entirely + /// of alphanumeric characters or the ['_', '-'] characters. + /// </summary> + public static Boolean IsLegalNodeName(string name) + { + if (string.IsNullOrWhiteSpace(name)) + { + return false; + } + + if (name.Length > PipelineConstants.MaxNodeNameLength) + { + return false; + } + + if (!char.IsLetter(name[0])) + { + return false; + } + + foreach (var c in name) + { + if (!char.IsLetterOrDigit(c) && c != '_') + { + return false; + } + } + + return true; + } + + public static String GetOrchestrationInstanceId( + Guid planId, + String nodeIdentifier) + { + return PipelineUtilities.GetInstanceName(planId.ToString("D"), nodeIdentifier); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineValidationError.cs b/src/Sdk/DTPipelines/Pipelines/PipelineValidationError.cs new file mode 100644 index 00000000000..539913ebe10 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineValidationError.cs @@ -0,0 +1,60 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides information about an error which occurred during pipeline validation.
+ /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class PipelineValidationError + { + public PipelineValidationError() + { + } + + public PipelineValidationError(String message) + : this(null, message) + { + } + + public PipelineValidationError( + String code, + String message) + { + Code = code; + Message = message; + } + + [DataMember(EmitDefaultValue = false)] + public String Code + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Message + { + get; + set; + } + + public static IEnumerable Create(Exception exception) + { + for (int i = 0; i < 50; i++) + { + yield return new PipelineValidationError(exception.Message); + if (exception.InnerException == null) + { + break; + } + + exception = exception.InnerException; + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PipelineValidationErrors.cs b/src/Sdk/DTPipelines/Pipelines/PipelineValidationErrors.cs new file mode 100644 index 00000000000..6ffed4043c0 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PipelineValidationErrors.cs @@ -0,0 +1,99 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides information about an error which occurred during pipeline validation. + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class PipelineValidationErrors : IEnumerable + { + public PipelineValidationErrors() + { + } + + public PipelineValidationErrors( + Int32 maxErrors, + Int32 maxMessageLength) + { + m_maxErrors = maxErrors; + m_maxMessageLength = maxMessageLength; + } + + public Int32 Count => m_errors.Count; + + public void Add(String message) + { + Add(new PipelineValidationError(message)); + } + + public void Add(Exception ex) + { + Add(null, ex); + } + + public void Add(String messagePrefix, Exception ex) + { + for (int i = 0; i < 50; i++) + { + String message = !String.IsNullOrEmpty(messagePrefix) ? 
$"{messagePrefix} {ex.Message}" : ex.Message; + Add(new PipelineValidationError(message)); + if (ex.InnerException == null) + { + break; + } + + ex = ex.InnerException; + } + } + + public void Add(IEnumerable errors) + { + foreach (var error in errors) + { + Add(error); + } + } + + public void Add(PipelineValidationError error) + { + // Check max errors + if (m_maxErrors <= 0 || + m_errors.Count < m_maxErrors) + { + // Check max message length + if (m_maxMessageLength > 0 && + error.Message?.Length > m_maxMessageLength) + { + error = new PipelineValidationError(error.Code, error.Message.Substring(0, m_maxMessageLength) + "[...]"); + } + + m_errors.Add(error); + } + } + + public void Clear() + { + m_errors.Clear(); + } + + public IEnumerator GetEnumerator() + { + return (m_errors as IEnumerable).GetEnumerator(); + } + + IEnumerator IEnumerable.GetEnumerator() + { + return (m_errors as IEnumerable).GetEnumerator(); + } + + private readonly List m_errors = new List(); + private readonly Int32 m_maxErrors; + private readonly Int32 m_maxMessageLength; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ProviderPhase.cs b/src/Sdk/DTPipelines/Pipelines/ProviderPhase.cs new file mode 100644 index 00000000000..1cc116a51ef --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ProviderPhase.cs @@ -0,0 +1,227 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.Pipelines.Validation; +using GitHub.DistributedTask.WebApi; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ProviderPhase : PhaseNode + { + public ProviderPhase() + { + } + + private ProviderPhase(ProviderPhase phaseToCopy) + : base(phaseToCopy) + { + } + + /// + /// Gets the phase type. + /// + [DataMember(EmitDefaultValue = false)] + public override PhaseType Type => PhaseType.Provider; + + /// + /// Gets or sets the environment target for this phase. + /// + [DataMember(EmitDefaultValue = false)] + public EnvironmentDeploymentTarget EnvironmentTarget + { + get; + set; + } + + /// + /// Gets or sets the provider for this phase. + /// + [DataMember(EmitDefaultValue = false)] + public String Provider + { + get; + set; + } + + /// + /// Gets or sets the strategy for this phase. + /// + [DataMember(EmitDefaultValue = false)] + public Dictionary Strategy + { + get; + set; + } + + /// + /// Resolves external references and ensures the steps are compatible with the selected target. 
+ /// + /// The validation context + public override void Validate( + PipelineBuildContext context, + ValidationResult result) + { + base.Validate(context, result); + + var provider = context.PhaseProviders.FirstOrDefault(x => String.Equals(x.Provider, this.Provider, StringComparison.OrdinalIgnoreCase)); + if (provider == null) + { + result.Errors.Add(new PipelineValidationError($"'{this.Provider}' phase '{this.Name}' is not supported.")); + } + else + { + var providerPhaseResult = provider.Validate(context, this); + if (providerPhaseResult != null) + { + foreach (var error in providerPhaseResult.Errors) + { + result.Errors.Add(error); + } + + result.ReferencedResources.MergeWith(providerPhaseResult.ReferencedResources); + + foreach (var endpointReference in providerPhaseResult.ReferencedResources.Endpoints) + { + var endpoint = context.ResourceStore.GetEndpoint(endpointReference); + if (endpoint == null) + { + result.UnauthorizedResources.AddEndpointReference(endpointReference); + } + } + + foreach (var fileReference in providerPhaseResult.ReferencedResources.Files) + { + var file = context.ResourceStore.GetFile(fileReference); + if (file == null) + { + result.UnauthorizedResources.AddSecureFileReference(fileReference); + } + } + + foreach (var queueReference in providerPhaseResult.ReferencedResources.Queues) + { + var queue = context.ResourceStore.GetQueue(queueReference); + if (queue == null) + { + result.UnauthorizedResources.AddAgentQueueReference(queueReference); + } + } + + foreach (var variableReference in providerPhaseResult.ReferencedResources.VariableGroups) + { + var variableGroup = context.ResourceStore.GetVariableGroup(variableReference); + if (variableGroup == null) + { + result.UnauthorizedResources.AddVariableGroupReference(variableReference); + } + } + } + } + + if (!(this.Target is AgentQueueTarget agentQueueTarget) || agentQueueTarget.IsLiteral()) + { + this.Target?.Validate(context, context.BuildOptions, result); + } + } + public JobExecutionContext CreateJobContext( + PhaseExecutionContext context, + JobInstance jobInstance) + { + var jobContext = context.CreateJobContext(jobInstance); + jobContext.Job.Definition.Id = jobContext.GetInstanceId(); + + var options = new BuildOptions(); + var builder = new PipelineBuilder(context); + var result = builder.GetReferenceResources(jobInstance.Definition.Steps.OfType().ToList(), jobInstance.Definition.Target); + jobContext.ReferencedResources.MergeWith(result); + + // Update the execution context with the job-specific system variables + UpdateJobContextVariablesFromJob(jobContext, jobInstance.Definition); + + return jobContext; + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class ProviderPhaseRequest + { + [DataMember(IsRequired = true)] + public Guid PlanId { get; set; } + + [DataMember(IsRequired = true)] + public String PlanType { get; set; } + + [DataMember(IsRequired = true)] + public Guid ServiceOwner { get; set; } + + [DataMember(IsRequired = true)] + public String PhaseOrchestrationId { get; set; } + + [DataMember(EmitDefaultValue = false)] + public ProviderPhase ProviderPhase { get; set; } + + [DataMember(EmitDefaultValue = false)] + public ProjectReference Project { get; set; } + + [DataMember(EmitDefaultValue = false)] + public TaskOrchestrationOwner Pipeline { get; set; } + + [DataMember(EmitDefaultValue = false)] + public TaskOrchestrationOwner Run { get; set; } + + [DataMember(EmitDefaultValue = false)] + public PipelineGraphNodeReference Stage { get; set; } + + 
[DataMember(EmitDefaultValue = false)] + public PipelineGraphNodeReference Phase { get; set; } + + [DataMember(EmitDefaultValue = false)] + public IDictionary Variables { get; set; } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class PipelineGraphNodeReference + { + public PipelineGraphNodeReference() + { + } + + public PipelineGraphNodeReference(String id, String name, Int32 attempt = 0) + { + this.Id = id; + this.Name = name; + this.Attempt = attempt; + } + + public PipelineGraphNodeReference(Guid id, String name, Int32 attempt = 0) + { + this.Id = id.ToString("D"); + this.Name = name; + this.Attempt = attempt; + } + + public PipelineGraphNodeReference(Int32 id, String name, Int32 attempt = 0) + { + this.Id = id.ToString(); + this.Name = name; + this.Attempt = attempt; + } + + [DataMember(IsRequired = true)] + public String Id { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String Name { get; set; } + + [DataMember(EmitDefaultValue = false)] + public Int32 Attempt { get; set; } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/PullRequestTrigger.cs b/src/Sdk/DTPipelines/Pipelines/PullRequestTrigger.cs new file mode 100644 index 00000000000..91af1aba88a --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/PullRequestTrigger.cs @@ -0,0 +1,83 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class PullRequestTrigger : PipelineTrigger + { + public PullRequestTrigger() + : base(PipelineTriggerType.PullRequest) + { + Enabled = true; + AutoCancel = true; + } + + [DataMember(EmitDefaultValue = true)] + public Boolean Enabled + { + get; + set; + } + + [DataMember(EmitDefaultValue = true)] + public Boolean AutoCancel + { + get; + set; + } + + /// + /// A list of filters that describe which branches will trigger pipelines. + /// + public IList BranchFilters + { + get + { + if (m_branchFilters == null) + { + m_branchFilters = new List(); + } + return m_branchFilters; + } + } + + /// + /// A list of filters that describe which paths will trigger pipelines. 
+ /// + public IList<String> PathFilters + { + get + { + if (m_pathFilters == null) + { + m_pathFilters = new List<String>(); + } + return m_pathFilters; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_branchFilters?.Count == 0) + { + m_branchFilters = null; + } + + if (m_pathFilters?.Count == 0) + { + m_pathFilters = null; + } + } + + [DataMember(Name = "BranchFilters", EmitDefaultValue = false)] + private List<String> m_branchFilters; + + [DataMember(Name = "PathFilters", EmitDefaultValue = false)] + private List<String> m_pathFilters; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/RepositoryResource.cs b/src/Sdk/DTPipelines/Pipelines/RepositoryResource.cs new file mode 100644 index 00000000000..5a70cfa80b4 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/RepositoryResource.cs @@ -0,0 +1,166 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class RepositoryPropertyNames + { + public static readonly String Id = "id"; + public static readonly String Mappings = "mappings"; + public static readonly String Name = "name"; + public static readonly String Ref = "ref"; + public static readonly String Type = "type"; + public static readonly String Url = "url"; + public static readonly String Version = "version"; + public static readonly String VersionInfo = "versionInfo"; + public static readonly String VersionSpec = "versionSpec"; + public static readonly String Shelveset = "shelveset"; + public static readonly String Project = "project"; + public static readonly String Path = "path"; + public static readonly String CheckoutOptions = "checkoutOptions"; + public static readonly String DefaultBranch = "defaultBranch"; + public static readonly String ExternalId = "externalId"; + public static readonly String IsJustInTimeRepository = "isJustInTimeRepository"; + } + + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class VersionInfo + { + [DataMember(EmitDefaultValue = false)] + public String Author { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String Message { get; set; } + } + + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class CheckoutOptions + { + [JsonConstructor] + public CheckoutOptions() + { } + + private CheckoutOptions(CheckoutOptions optionsToCopy) + { + this.Clean = optionsToCopy.Clean; + this.FetchDepth = optionsToCopy.FetchDepth; + this.Lfs = optionsToCopy.Lfs; + this.Submodules = optionsToCopy.Submodules; + this.PersistCredentials = optionsToCopy.PersistCredentials; + } + + [DataMember(EmitDefaultValue = false)] + public String Clean { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String FetchDepth { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String Lfs { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String Submodules { get; set; } + + [DataMember(EmitDefaultValue = false)] + public String PersistCredentials { get; set; } + + public CheckoutOptions Clone() + { + return new CheckoutOptions(this); + } + } + + /// <summary> + /// Provides a data contract for a repository resource referenced by a pipeline. + /// </summary> + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class RepositoryResource : Resource + { + /// + /// Initializes a new RepositoryReference instance with default values.
+ /// + public RepositoryResource() + { + } + + private RepositoryResource(RepositoryResource referenceToCopy) + : base(referenceToCopy) + { + } + + /// + /// Gets or sets a unique identifier for this repository. + /// + public String Id + { + get + { + return this.Properties.Get(RepositoryPropertyNames.Id); + } + set + { + this.Properties.Set(RepositoryPropertyNames.Id, value); + } + } + + /// + /// Gets or sets the type of repository. + /// + public String Type + { + get + { + return this.Properties.Get(RepositoryPropertyNames.Type); + } + set + { + this.Properties.Set(RepositoryPropertyNames.Type, value); + } + } + + /// + /// Gets or sets the url of the repository. + /// + public Uri Url + { + get + { + return this.Properties.Get(RepositoryPropertyNames.Url); + } + set + { + this.Properties.Set(RepositoryPropertyNames.Url, value); + } + } + + /// + /// Gets or sets the version of the repository. + /// + public String Version + { + get + { + return this.Properties.Get(RepositoryPropertyNames.Version); + } + set + { + this.Properties.Set(RepositoryPropertyNames.Version, value); + } + } + + /// + /// Creates a clone of the current repository instance. + /// + /// A new RepositoryReference instance which is a copy of the current instance + public RepositoryResource Clone() + { + return new RepositoryResource(this); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/RepositoryTypes.cs b/src/Sdk/DTPipelines/Pipelines/RepositoryTypes.cs new file mode 100644 index 00000000000..e0f46ea5ce1 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/RepositoryTypes.cs @@ -0,0 +1,15 @@ +using System; + +namespace GitHub.DistributedTask.Pipelines +{ + public static class RepositoryTypes + { + public static readonly String Bitbucket = nameof(Bitbucket); + public static readonly String ExternalGit = nameof(ExternalGit); + public static readonly String Git = nameof(Git); + public static readonly String GitHub = nameof(GitHub); + public static readonly String GitHubEnterprise = nameof(GitHubEnterprise); + public static readonly String Tfvc = nameof(Tfvc); + public static readonly String Svn = nameof(Svn); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Resource.cs b/src/Sdk/DTPipelines/Pipelines/Resource.cs new file mode 100644 index 00000000000..2d7c5d79432 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Resource.cs @@ -0,0 +1,72 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class Resource + { + /// + /// Initializes a new Resource instance with default properties. + /// + protected Resource() + { + } + + protected Resource(Resource resourceToCopy) + { + this.Alias = resourceToCopy.Alias; + this.Endpoint = resourceToCopy.Endpoint?.Clone(); + m_properties = resourceToCopy.m_properties?.Clone(); + } + + /// + /// Gets or sets the name of the resource. + /// + [DataMember(EmitDefaultValue = false)] + public String Alias + { + get; + set; + } + + /// + /// Gets or sets an optional endpoint used for connecting to the resource. + /// + [DataMember(EmitDefaultValue = false)] + public ServiceEndpointReference Endpoint + { + get; + set; + } + + /// + /// Gets the extended properties set on the resource. 
+ /// + public ResourceProperties Properties + { + get + { + if (m_properties == null) + { + m_properties = new ResourceProperties(); + } + return m_properties; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_properties?.Count == 0) + { + m_properties = null; + } + } + + [DataMember(Name = "Properties", EmitDefaultValue = false)] + private ResourceProperties m_properties; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ResourceComparer.cs b/src/Sdk/DTPipelines/Pipelines/ResourceComparer.cs new file mode 100644 index 00000000000..76aa7eabed7 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ResourceComparer.cs @@ -0,0 +1,20 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.Pipelines +{ + internal sealed class ResourceComparer : IEqualityComparer + { + public Boolean Equals( + Resource x, + Resource y) + { + return String.Equals(x?.Alias, y?.Alias, StringComparison.OrdinalIgnoreCase); + } + + public Int32 GetHashCode(Resource obj) + { + return obj?.Alias?.GetHashCode() ?? 0; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ResourceProperties.cs b/src/Sdk/DTPipelines/Pipelines/ResourceProperties.cs new file mode 100644 index 00000000000..3abdf7abfbf --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ResourceProperties.cs @@ -0,0 +1,256 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.ComponentModel; +using System.Linq; +using System.Reflection; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechanism for getting and setting resource properties. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [JsonConverter(typeof(ResourcePropertiesJsonConverter))] + public class ResourceProperties + { + public ResourceProperties() + { + } + + internal ResourceProperties(IDictionary items) + { + m_items = new Dictionary(items, StringComparer.OrdinalIgnoreCase); + } + + private ResourceProperties(ResourceProperties propertiesToClone) + { + if (propertiesToClone?.m_items?.Count > 0) + { + m_items = new Dictionary(propertiesToClone.m_items, StringComparer.OrdinalIgnoreCase); + } + } + + /// + /// Gets the count of properties defined. + /// + public Int32 Count + { + get + { + return m_items?.Count ?? 
0; + } + } + + internal IDictionary Items + { + get + { + if (m_items == null) + { + m_items = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_items; + } + } + + public IReadOnlyDictionary GetItems() + { + return new ReadOnlyDictionary(this.Items); + } + + public ResourceProperties Clone() + { + return new ResourceProperties(this); + } + + public Boolean Delete(String name) + { + return this.Items.Remove(name); + } + + public Boolean DeleteAllExcept(ISet names) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(names, nameof(names)); + + Boolean removed = false; + if (m_items?.Count > 0) + { + foreach (var propertyName in m_items.Keys.Where(x => !names.Contains(x)).ToArray()) + { + removed |= Delete(propertyName); + } + } + + return removed; + } + + public T Get( + String name, + T defaultValue = default(T)) + { + if (this.Items.TryGetValue(name, out var tokenValue) && tokenValue != null) + { + if (typeof(T) == typeof(JToken)) + { + return (T)(Object)tokenValue; + } + else + { + return tokenValue.ToObject(s_serializer); + } + } + + return defaultValue; + } + + public Boolean TryGetValue(String name, out T value) + { + if (this.Items.TryGetValue(name, out var tokenValue) && tokenValue != null) + { + if (typeof(T) == typeof(JToken)) + { + value = (T)(Object)tokenValue; + } + else + { + value = tokenValue.ToObject(s_serializer); + } + + return true; + } + else + { + value = default(T); + return false; + } + } + + public void Set( + String name, + T value) + { + if (value == null) + { + this.Items[name] = null; + } + else if (typeof(T) == typeof(JToken)) + { + this.Items[name] = value as JToken; + } + else + { + this.Items[name] = JToken.FromObject(value, s_serializer); + } + } + + public void UnionWith( + ResourceProperties properties, + Boolean overwrite = false) + { + if (properties?.m_items == null) + { + return; + } + + foreach (var property in properties.m_items) + { + if (overwrite || !this.Items.ContainsKey(property.Key)) + { + this.Items[property.Key] = property.Value; + } + } + } + + internal IDictionary ToStringDictionary() + { + return this.Items.ToDictionary(x => x.Key, x => ToObject(x.Value), StringComparer.OrdinalIgnoreCase); + } + + private static Object ToObject(JToken token) + { + switch (token.Type) + { + case JTokenType.Boolean: + return Convert.ToString((Boolean)token); + case JTokenType.Date: + return Convert.ToString((DateTime)token); + case JTokenType.Float: + return Convert.ToString((Single)token); + case JTokenType.Guid: + return Convert.ToString((Guid)token); + case JTokenType.Integer: + return Convert.ToString((Int32)token); + case JTokenType.TimeSpan: + return Convert.ToString((TimeSpan)token); + case JTokenType.Uri: + return Convert.ToString((Uri)token); + case JTokenType.String: + return (String)token; + + case JTokenType.Array: + var array = token as JArray; + return array.Select(x => ToObject(x)).ToList(); + + case JTokenType.Object: + return ToDictionary(token as JObject); + } + + return null; + } + + private static IDictionary ToDictionary(JObject @object) + { + var result = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var item in @object.Properties()) + { + result[item.Name] = ToObject(item.Value); + } + return result; + } + + private IDictionary m_items; + private static readonly JsonSerializer s_serializer = JsonUtility.CreateJsonSerializer(); + } + + internal class ResourcePropertiesJsonConverter : VssSecureJsonConverter + { + public override Boolean CanWrite + { + get + { + return true; + } + } + + public override 
Boolean CanConvert(Type objectType) + { + return typeof(IDictionary).GetTypeInfo().IsAssignableFrom(objectType); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + var items = serializer.Deserialize>(reader); + return new ResourceProperties(items); + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + base.WriteJson(writer, value, serializer); + + var properties = (ResourceProperties)value; + serializer.Serialize(writer, properties?.Items); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ResourceReference.cs b/src/Sdk/DTPipelines/Pipelines/ResourceReference.cs new file mode 100644 index 00000000000..0462d76c65f --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ResourceReference.cs @@ -0,0 +1,56 @@ +using Newtonsoft.Json; +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a base set of properties common to all pipeline resource types. + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class ResourceReference + { + protected ResourceReference() + { + } + + protected ResourceReference(ResourceReference referenceToCopy) + { + this.Name = referenceToCopy.Name; + } + + /// + /// Gets or sets the name of the referenced resource. + /// + [DataMember(EmitDefaultValue = false)] + [JsonConverter(typeof(ExpressionValueJsonConverter))] + public ExpressionValue Name + { + get; + set; + } + + public override String ToString() + { + var name = this.Name; + if (name != null) + { + var s = name.Literal; + if (!String.IsNullOrEmpty(s)) + { + return s; + } + + s = name.Expression; + if (!String.IsNullOrEmpty(s)) + { + return s; + } + } + + return null; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ResourceStore.cs b/src/Sdk/DTPipelines/Pipelines/ResourceStore.cs new file mode 100644 index 00000000000..c5ce4ba7c6a --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ResourceStore.cs @@ -0,0 +1,666 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.Orchestration.Server.Artifacts; +using GitHub.DistributedTask.Pipelines.Artifacts; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a default implementation of a resource store. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ResourceStore : IResourceStore + { + /// + /// Initializes a new ResourceStore instance with no resources. + /// + public ResourceStore() + : this(endpoints: null) + { + } + + /// + /// Initializes a new ResourceStore instance with the specified resources. If aliases are provided, + /// an alias overrides lookup by name for the specified resource. 
+ /// + /// The collection of endpoints available in the store + /// The collection of secure files available in the store + /// The collection of agent queues available in the store + /// The collection of variable groups available in the store + public ResourceStore( + IList endpoints = null, + IList files = null, + IList queues = null, + IList variableGroups = null, + IList builds = null, + IList containers = null, + IList repositories = null, + IList pipelines = null, + IList pools = null) + : this(new ServiceEndpointStore(endpoints), new SecureFileStore(files), new AgentQueueStore(queues), new VariableGroupStore(variableGroups), new BuildResourceStore(builds), new ContainerResourceStore(containers), new RepositoryResourceStore(repositories), new PipelineResourceStore(pipelines), new AgentPoolStore(pools), new EnvironmentStore(null)) + { + } + + /// + /// Initializes a new ResourceStore instance with the specified resources and endpoint store. If + /// aliases are provided, an alias overrides lookup by name for the specified resource. + /// + /// The store for retrieving referenced service endpoints + /// The store for retrieving referenced secure files + /// The store for retrieving referenced agent queues + /// The store for retrieving reference variable groups + public ResourceStore( + IServiceEndpointStore endpointStore = null, + ISecureFileStore fileStore = null, + IAgentQueueStore queueStore = null, + IVariableGroupStore variableGroupStore = null, + IBuildStore buildStore = null, + IContainerStore containerStore = null, + IRepositoryStore repositoryStore = null, + IPipelineStore pipelineStore = null, + IAgentPoolStore poolStore = null, + IEnvironmentStore environmentStore = null) + { + this.Builds = buildStore ?? new BuildResourceStore(null); + this.Containers = containerStore ?? new ContainerResourceStore(null); + this.Endpoints = endpointStore ?? new ServiceEndpointStore(null); + this.Files = fileStore ?? new SecureFileStore(null); + this.Pipelines = pipelineStore ?? new PipelineResourceStore(null); + this.Queues = queueStore ?? new AgentQueueStore(null); + this.Pools = poolStore ?? new AgentPoolStore(null); + this.Repositories = repositoryStore ?? new RepositoryResourceStore(null); + this.VariableGroups = variableGroupStore ?? new VariableGroupStore(null); + this.Environments = environmentStore ?? new EnvironmentStore(null); + } + + /// + /// Gets the store used for retrieving build resources. + /// + public IBuildStore Builds + { + get; + } + + /// + /// Gets the store used for retrieving container resources. + /// + public IContainerStore Containers + { + get; + } + + /// + /// Gets the store used for retrieving service endpoints. + /// + public IServiceEndpointStore Endpoints + { + get; + } + + /// + /// Gets the store used for retrieving environment. + /// + public IEnvironmentStore Environments + { + get; + } + + /// + /// Gets the store used for retrieving secure files. + /// + public ISecureFileStore Files + { + get; + } + + /// + /// Get the store used for retrieving pipelines. + /// + public IPipelineStore Pipelines + { + get; + } + + /// + /// Gets the store used for retrieving agent queues. + /// + public IAgentQueueStore Queues + { + get; + } + + /// + /// Gets the store used for retrieving agent pools. + /// + public IAgentPoolStore Pools + { + get; + } + + /// + /// Gets the store used for retrieving repository resources. + /// + public IRepositoryStore Repositories + { + get; + } + + /// + /// Gets the store used for retrieving variable groups. 
+ /// + public IVariableGroupStore VariableGroups + { + get; + } + + /// + /// Gets all resources currently in the resource store. + /// + /// + public PipelineResources GetAuthorizedResources() + { + var resources = new PipelineResources(); + resources.Builds.AddRange(this.Builds.GetAll()); + resources.Containers.AddRange(this.Containers.GetAll()); + resources.Endpoints.AddRange(this.Endpoints.GetAuthorizedReferences()); + resources.Files.AddRange(this.Files.GetAuthorizedReferences()); + resources.Pipelines.AddRange(this.Pipelines.GetAll()); + resources.Queues.AddRange(this.Queues.GetAuthorizedReferences()); + resources.Pools.AddRange(this.Pools.GetAuthorizedReferences()); + resources.Repositories.AddRange(this.Repositories.GetAll()); + resources.VariableGroups.AddRange(this.VariableGroups.GetAuthorizedReferences()); + resources.Environments.AddRange(this.Environments.GetReferences()); + return resources; + } + + /// + /// Gets the steps, if any, which should be inserted into the job based on the resources configured. + /// + /// The execution context + /// The current set of steps for the job + /// A list of steps which should be prepended to the job + public IList GetPreSteps( + IPipelineContext context, + IReadOnlyList steps) + { + var allSteps = new List(); + if (context.EnvironmentVersion > 1 && context is PipelineExecutionContext) + { + // Variable group steps are always set first in case the other steps depend on the values + allSteps.AddRangeIfRangeNotNull(this.VariableGroups.GetPreSteps(context, steps)); + + // Now just do the remaining resources in alphabetical order + allSteps.AddRangeIfRangeNotNull(this.Builds.GetPreSteps(context, steps)); + allSteps.AddRangeIfRangeNotNull(this.Repositories.GetPreSteps(context, steps)); + allSteps.AddRangeIfRangeNotNull(this.Pipelines.GetPreSteps(context, steps)); + } + + return allSteps; + } + + /// + /// Get steps that run after the checkout task. + /// + /// The execution context + /// + /// + public Dictionary> GetPostTaskSteps( + IPipelineContext context, + IReadOnlyList steps) + { + return new Dictionary>(); + } + + /// + /// Get steps that are run after all other steps. 
+ /// + /// The execution context + /// + public IList GetPostSteps( + IPipelineContext context, + IReadOnlyList steps) + { + return new List(); + } + + public ServiceEndpoint GetEndpoint(Guid endpointId) + { + return this.Endpoints.Get(new ServiceEndpointReference { Id = endpointId }); + } + + public ServiceEndpoint GetEndpoint(String endpointId) + { + ServiceEndpoint endpoint = null; + if (Guid.TryParse(endpointId, out Guid endpointIdValue)) + { + endpoint = GetEndpoint(endpointIdValue); + } + + if (endpoint == null) + { + endpoint = this.Endpoints.Get(new ServiceEndpointReference { Name = endpointId }); + } + + return endpoint; + } + + public SecureFile GetFile(Guid fileId) + { + return this.Files.Get(new SecureFileReference { Id = fileId }); + } + + public SecureFile GetFile(String fileId) + { + SecureFile file = null; + if (Guid.TryParse(fileId, out Guid fileIdValue)) + { + file = GetFile(fileIdValue); + } + + if (file == null) + { + file = this.Files.Get(new SecureFileReference { Name = fileId }); + } + + return file; + } + + public TaskAgentQueue GetQueue(Int32 queueId) + { + return this.Queues.Get(new AgentQueueReference { Id = queueId }); + } + + public TaskAgentQueue GetQueue(String queueId) + { + TaskAgentQueue queue = null; + if (Int32.TryParse(queueId, out Int32 queueIdValue)) + { + queue = GetQueue(queueIdValue); + } + + if (queue == null) + { + queue = this.Queues.Get(new AgentQueueReference { Name = queueId }); + } + + return queue; + } + + public TaskAgentPool GetPool(Int32 poolId) + { + return this.Pools.Get(new AgentPoolReference { Id = poolId }); + } + + public TaskAgentPool GetPool(String poolName) + { + return this.Pools.Get(new AgentPoolReference { Name = poolName }); + } + + public VariableGroup GetVariableGroup(Int32 groupId) + { + return this.VariableGroups.Get(new VariableGroupReference { Id = groupId }); + } + + public VariableGroup GetVariableGroup(String groupId) + { + VariableGroup variableGroup = null; + if (Int32.TryParse(groupId, out Int32 groupIdValue)) + { + variableGroup = GetVariableGroup(groupIdValue); + } + + if (variableGroup == null) + { + variableGroup = this.VariableGroups.Get(new VariableGroupReference { Name = groupId }); + } + + return variableGroup; + } + + public Boolean ResolveStep( + IPipelineContext context, + JobStep step, + out IList resolvedSteps) + { + resolvedSteps = new List(); + if (context.EnvironmentVersion > 1 && context is PipelineExecutionContext) + { + return this.Pipelines.ResolveStep(context, step, out resolvedSteps); + } + + return false; + } + } + + public abstract class InMemoryResourceStore where T : Resource + { + protected InMemoryResourceStore(IEnumerable resources) + { + m_resources = resources?.ToDictionary(x => x.Alias, x => x, StringComparer.OrdinalIgnoreCase) ?? 
new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + public Int32 Count => m_resources.Count; + + public void Add(T resource) + { + m_resources.Add(resource.Alias, resource); + } + + public void Add(IEnumerable resources) + { + foreach (var resource in resources) + { + m_resources.Add(resource.Alias, resource); + } + } + + public T Get(String alias) + { + if (m_resources.TryGetValue(alias, out T resource)) + { + return resource; + } + + return null; + } + + public IEnumerable GetAll() + { + return m_resources.Values.ToList(); + } + + private Dictionary m_resources; + } + + public class BuildResourceStore : InMemoryResourceStore, IBuildStore + { + public BuildResourceStore( + IEnumerable builds, + IArtifactResolver resolver = null) + : base(builds) + { + this.Resolver = resolver; + } + + public BuildResourceStore(params BuildResource[] builds) + : base(builds) + { + } + + public IArtifactResolver Resolver { get; } + + public IList GetPreSteps( + IPipelineContext context, + IReadOnlyList steps) + { + return null; + } + + public Dictionary> GetPostTaskSteps( + IPipelineContext context, + IReadOnlyList steps) + { + return new Dictionary>(); + } + + public IList GetPostSteps( + IPipelineContext context, + IReadOnlyList steps) + { + return new List(); + } + + public Boolean ResolveStep( + IPipelineContext context, + JobStep step, + out IList resolvedSteps) + { + resolvedSteps = new List(); + return false; + } + } + + public class ContainerResourceStore : InMemoryResourceStore, IContainerStore + { + public ContainerResourceStore(IEnumerable containers) + : base(containers) + { + } + + public ContainerResourceStore(params ContainerResource[] containers) + : base(containers) + { + } + + public bool ResolveStep( + IPipelineContext context, + JobStep step, + out IList resolvedSteps) + { + resolvedSteps = new List(); + return false; + } + } + + public class PipelineResourceStore : InMemoryResourceStore, IPipelineStore + { + public PipelineResourceStore( + IEnumerable pipelines, + IArtifactResolver artifactResolver = null, + Boolean isEnabled = false, + Boolean useSystemStepsDecorator = false) + : base(pipelines) + { + this.m_artifactResolver = artifactResolver; + this.m_isEnabled = isEnabled; + this.m_useSystemStepsDecorator = useSystemStepsDecorator; + } + + public IList GetPreSteps( + IPipelineContext context, + IReadOnlyList steps) + { + return new List(); + } + + public Dictionary> GetPostTaskSteps( + IPipelineContext context, + IReadOnlyList steps) + { + return new Dictionary>(); + } + + public IList GetPostSteps( + IPipelineContext context, + IReadOnlyList steps) + { + return new List(); + } + + public Boolean ResolveStep( + IPipelineContext context, + JobStep step, + out IList resolvedSteps) + { + resolvedSteps = new List(); + + if (step.IsDownloadTask()) + { + if (!m_isEnabled) + { + // The pre step decorator can't check the FF state. It always adds a download step for a current pipeline. + // To make sure we aren't failing all the existing pipelines, if the DownloadStep FF is not enabled we will return as resolved with empty resolved steps. + return true; + } + + return m_artifactResolver?.ResolveStep(context, step, out resolvedSteps) ?? 
false; + } + + return false; + } + + private IArtifactResolver m_artifactResolver; + private Boolean m_isEnabled; + private Boolean m_useSystemStepsDecorator; + } + + public class RepositoryResourceStore : InMemoryResourceStore, IRepositoryStore + { + public RepositoryResourceStore(IEnumerable repositories) + : this(repositories, false, false) + { + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public RepositoryResourceStore( + IEnumerable repositories, + Boolean useSystemStepsDecorator, + Boolean includeCheckoutOptions) + : base(repositories) + { + m_useSystemStepsDecorator = useSystemStepsDecorator; + m_includeCheckoutOptions = includeCheckoutOptions; + } + + public IList GetPreSteps( + IPipelineContext context, + IReadOnlyList steps) + { + // If the environment version is 1 we should not inject a checkout step + if (context.EnvironmentVersion < 2) + { + return null; + } + + var executionContext = context as JobExecutionContext; + if (context is JobExecutionContext jobContext && (jobContext.Phase.Definition as Phase)?.Target.Type != PhaseTargetType.Queue) + { + // only inject the checkout step for agent phases + return null; + } + + // Check feature flag DistributedTask.IncludeCheckoutOptions. + // Controls whether the checkout options are merged into the task inputs here, + // or whether the checkout task does so itself. + if (!m_includeCheckoutOptions) + { + // Populate default checkout options from the repository into the task's inputs + foreach (var checkoutTask in steps.Where(x => x.IsCheckoutTask()).OfType()) + { + var repository = Get(checkoutTask.Inputs[PipelineConstants.CheckoutTaskInputs.Repository]); + if (repository != null && repository.Properties.TryGetValue(RepositoryPropertyNames.CheckoutOptions, out CheckoutOptions checkoutOptions)) + { + MergeCheckoutOptions(checkoutOptions, checkoutTask); + } + } + } + + // Check feature flag DistributedTask.YamlSystemStepsDecorator. + // Controls whether to load the checkout step from a YAML template. + if (m_useSystemStepsDecorator) + { + return null; + } + + var selfRepo = Get(PipelineConstants.SelfAlias); + if (selfRepo == null) + { + // self repository doesn't exist, no need to inject a checkout task.
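+ // Note (added for clarity): the 'self' repository is what the injected checkout step would check out, so without that resource there is nothing to inject.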
+ // self repo is for yaml only, designer build should always provide checkout task + return null; + } + else + { + // If any steps contains checkout task, we will not inject checkout task + if (steps.Any(x => x.IsCheckoutTask())) + { + return null; + } + else + { + //Inject checkout:self task + var checkoutTask = new TaskStep() + { + Enabled = true, + DisplayName = PipelineConstants.CheckoutTask.FriendlyName, + Reference = new TaskStepDefinitionReference() + { + Id = PipelineConstants.CheckoutTask.Id, + Version = PipelineConstants.CheckoutTask.Version, + Name = PipelineConstants.CheckoutTask.Name + } + }; + + checkoutTask.Inputs[PipelineConstants.CheckoutTaskInputs.Repository] = selfRepo.Alias; + if (selfRepo.Properties.TryGetValue(RepositoryPropertyNames.CheckoutOptions, out CheckoutOptions checkoutOptions)) + { + MergeCheckoutOptions(checkoutOptions, checkoutTask); + } + + return new[] { checkoutTask }; + } + } + } + + public Dictionary> GetPostTaskSteps( + IPipelineContext context, + IReadOnlyList steps) + { + return new Dictionary>(); + } + + public IList GetPostSteps( + IPipelineContext context, + IReadOnlyList steps) + { + return new List(); + } + + private void MergeCheckoutOptions( + CheckoutOptions checkoutOptions, + TaskStep checkoutTask) + { + if (!checkoutTask.Inputs.ContainsKey(PipelineConstants.CheckoutTaskInputs.Clean) && !String.IsNullOrEmpty(checkoutOptions.Clean)) + { + checkoutTask.Inputs[PipelineConstants.CheckoutTaskInputs.Clean] = checkoutOptions.Clean; + } + + if (!checkoutTask.Inputs.ContainsKey(PipelineConstants.CheckoutTaskInputs.FetchDepth) && !String.IsNullOrEmpty(checkoutOptions.FetchDepth)) + { + checkoutTask.Inputs[PipelineConstants.CheckoutTaskInputs.FetchDepth] = checkoutOptions.FetchDepth; + } + + if (!checkoutTask.Inputs.ContainsKey(PipelineConstants.CheckoutTaskInputs.Lfs) && !String.IsNullOrEmpty(checkoutOptions.Lfs)) + { + checkoutTask.Inputs[PipelineConstants.CheckoutTaskInputs.Lfs] = checkoutOptions.Lfs; + } + + if (!checkoutTask.Inputs.ContainsKey(PipelineConstants.CheckoutTaskInputs.PersistCredentials) && !String.IsNullOrEmpty(checkoutOptions.PersistCredentials)) + { + checkoutTask.Inputs[PipelineConstants.CheckoutTaskInputs.PersistCredentials] = checkoutOptions.PersistCredentials; + } + + if (!checkoutTask.Inputs.ContainsKey(PipelineConstants.CheckoutTaskInputs.Submodules) && !String.IsNullOrEmpty(checkoutOptions.Submodules)) + { + checkoutTask.Inputs[PipelineConstants.CheckoutTaskInputs.Submodules] = checkoutOptions.Submodules; + } + } + + public Boolean ResolveStep( + IPipelineContext context, + JobStep step, + out IList resolvedSteps) + { + resolvedSteps = new List(); + return false; + } + + private Boolean m_useSystemStepsDecorator; + private Boolean m_includeCheckoutOptions; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/AgentJobStartedData.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/AgentJobStartedData.cs new file mode 100644 index 00000000000..a8a68681d28 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/AgentJobStartedData.cs @@ -0,0 +1,17 @@ +using System; +using System.Runtime.Serialization; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + [DataContract] + public sealed class AgentJobStartedData + { + [DataMember(EmitDefaultValue = false)] + public TaskAgentReference ReservedAgent + { + get; + set; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/GraphExecutionContext.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/GraphExecutionContext.cs new file mode 100644 
index 00000000000..31c9b4a2f56 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/GraphExecutionContext.cs @@ -0,0 +1,46 @@ +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class GraphExecutionContext : PipelineExecutionContext where TInstance : IGraphNodeInstance + { + private protected GraphExecutionContext(GraphExecutionContext context) + : base(context) + { + this.Node = context.Node; + } + + private protected GraphExecutionContext( + TInstance node, + PipelineState state, + DictionaryContextData data, + ICounterStore counterStore, + IPackageStore packageStore, + IResourceStore resourceStore, + ITaskStore taskStore, + IList stepProviders, + IPipelineIdGenerator idGenerator = null, + IPipelineTraceWriter trace = null, + EvaluationOptions expressionOptions = null, + ExecutionOptions executionOptions = null) + : base(data, counterStore, packageStore, resourceStore, taskStore, stepProviders, state, idGenerator, trace, expressionOptions, executionOptions) + { + ArgumentUtility.CheckForNull(node, nameof(node)); + + this.Node = node; + } + + /// + /// Gets the target node for this execution context. + /// + protected TInstance Node + { + get; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/GraphNodeInstance.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/GraphNodeInstance.cs new file mode 100644 index 00000000000..c2e65be20d1 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/GraphNodeInstance.cs @@ -0,0 +1,144 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + [DataContract] + public abstract class GraphNodeInstance : IGraphNodeInstance where TNode : IGraphNode + { + private protected GraphNodeInstance() + { + this.Attempt = 1; + } + + private protected GraphNodeInstance( + String name, + Int32 attempt, + TNode definition, + TaskResult result) + { + this.Name = name; + this.Attempt = attempt; + this.Definition = definition; + this.State = PipelineState.NotStarted; + this.Result = result; + } + + /// + /// Gets or sets the identifier of the node. + /// + [DataMember(EmitDefaultValue = false)] + public String Identifier + { + get; + set; + } + + /// + /// Gets or sets the name of the node. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Int32 Attempt + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public DateTime? StartTime + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public DateTime? FinishTime + { + get; + set; + } + + /// + /// Gets the collection of output variables emitted by the instance. + /// + public IDictionary Outputs + { + get + { + if (m_outputs == null) + { + m_outputs = new VariablesDictionary(); + } + return m_outputs; + } + } + + [DataMember(EmitDefaultValue = false)] + public PipelineState State + { + get; + set; + } + + /// + /// Gets or sets the result of the instance. + /// + [DataMember(EmitDefaultValue = false)] + public TaskResult? Result + { + get; + set; + } + + /// + /// Gets the structure defined in the pipeline definition. 
+ /// + public TNode Definition + { + get; + internal set; + } + + /// + /// Gets the timeline record for this instance. + /// + internal TimelineRecord Record + { + get; + } + + public Boolean SecretsAccessed + { + get + { + return m_outputs?.SecretsAccessed.Count > 0; + } + } + + public void ResetSecretsAccessed() + { + m_outputs?.SecretsAccessed.Clear(); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_outputs?.Count == 0) + { + m_outputs = null; + } + } + + [DataMember(Name = "Outputs", EmitDefaultValue = false)] + private VariablesDictionary m_outputs; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/JobAttempt.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/JobAttempt.cs new file mode 100644 index 00000000000..bda09e88adb --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/JobAttempt.cs @@ -0,0 +1,14 @@ +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class JobAttempt + { + public JobInstance Job + { + get; + set; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/JobExecutionContext.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/JobExecutionContext.cs new file mode 100644 index 00000000000..d29c3cb51f6 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/JobExecutionContext.cs @@ -0,0 +1,113 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class JobExecutionContext : PipelineExecutionContext + { + public JobExecutionContext( + PipelineState state, + IPipelineIdGenerator idGenerator = null) + : base(null, null, null, null, null, null, state, idGenerator) + { + } + + public JobExecutionContext( + PhaseExecutionContext context, + JobInstance job, + IDictionary variables, + Int32 positionInPhase = default, + Int32 totalJobsInPhase = default, + IDictionary data = default) + : base(context) + { + this.Stage = context.Stage; + this.Phase = context.Phase; + this.Job = job; + + // Make sure the identifier is properly set + this.Job.Identifier = this.IdGenerator.GetJobIdentifier(this.Stage?.Name, this.Phase.Name, this.Job.Name); + + if (job.Definition?.Variables?.Count > 0) + { + SetUserVariables(job.Definition.Variables.OfType()); + } + + SetSystemVariables(variables); + + // Add the attempt information into the context + var systemVariables = new List + { + new Variable + { + Name = WellKnownDistributedTaskVariables.JobIdentifier, + Value = job.Identifier + }, + new Variable + { + Name = WellKnownDistributedTaskVariables.JobAttempt, + Value = job.Attempt.ToString() + }, + }; + + if (positionInPhase != default) + { + systemVariables.Add(new Variable + { + Name = WellKnownDistributedTaskVariables.JobPositionInPhase, + Value = positionInPhase.ToString() + }); + } + + if (totalJobsInPhase != default) + { + systemVariables.Add(new Variable + { + Name = WellKnownDistributedTaskVariables.TotalJobsInPhase, + Value = totalJobsInPhase.ToString() + }); + } + + SetSystemVariables(systemVariables); + + if (String.IsNullOrEmpty(this.ExecutionOptions.SystemTokenScope) && + this.Variables.TryGetValue(WellKnownDistributedTaskVariables.AccessTokenScope, out VariableValue tokenScope)) + { + this.ExecutionOptions.SystemTokenScope = tokenScope?.Value; + } + + if (data?.Count > 0) + { + 
foreach (var pair in data) + { + Data[pair.Key] = pair.Value; + } + } + } + + public StageInstance Stage + { + get; + } + + public PhaseInstance Phase + { + get; + } + + public JobInstance Job + { + get; + } + + internal override String GetInstanceName() + { + return this.IdGenerator.GetJobInstanceName(this.Stage?.Name, this.Phase.Name, this.Job.Name, this.Job.Attempt); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/JobInstance.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/JobInstance.cs new file mode 100644 index 00000000000..45d12e8fab0 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/JobInstance.cs @@ -0,0 +1,124 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public sealed class JobInstance + { + public JobInstance() + : this(String.Empty) + { + } + + public JobInstance(String name) + : this(name, 1) + { + } + + public JobInstance( + String name, + Int32 attempt) + { + this.Name = name; + this.Attempt = attempt; + } + + public JobInstance( + String name, + TaskResult result) + : this(name) + { + this.Result = result; + } + + public JobInstance(Job job) + : this(job, 1) + { + } + + public JobInstance( + Job job, + Int32 attempt) + : this(job.Name, attempt) + { + this.Definition = job; + this.State = PipelineState.NotStarted; + } + + [DataMember] + public String Identifier + { + get; + set; + } + + [DataMember] + public String Name + { + get; + set; + } + + [DataMember] + public Int32 Attempt + { + get; + set; + } + + [DataMember] + public DateTime? StartTime + { + get; + set; + } + + [DataMember] + public DateTime? FinishTime + { + get; + set; + } + + [DataMember] + public PipelineState State + { + get; + set; + } + + [DataMember] + public TaskResult? 
Result + { + get; + set; + } + + [DataMember] + public Job Definition + { + get; + set; + } + + public IDictionary Outputs + { + get + { + if (m_outputs == null) + { + m_outputs = new VariablesDictionary(); + } + return m_outputs; + } + } + + [DataMember(Name = "Outputs")] + private VariablesDictionary m_outputs; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/JobStartedEventData.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/JobStartedEventData.cs new file mode 100644 index 00000000000..84491294e32 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/JobStartedEventData.cs @@ -0,0 +1,30 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + [DataContract] + public sealed class JobStartedEventData + { + [DataMember(EmitDefaultValue = false)] + public PhaseTargetType JobType + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Guid JobId + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Object Data + { + get; + set; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/PhaseAttempt.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/PhaseAttempt.cs new file mode 100644 index 00000000000..6995b84ab53 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/PhaseAttempt.cs @@ -0,0 +1,30 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class PhaseAttempt + { + public PhaseInstance Phase + { + get; + set; + } + + public IList Jobs + { + get + { + if (m_jobs == null) + { + m_jobs = new List(); + } + return m_jobs; + } + } + + private List m_jobs; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/PhaseExecutionContext.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/PhaseExecutionContext.cs new file mode 100644 index 00000000000..41deffce004 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/PhaseExecutionContext.cs @@ -0,0 +1,116 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Pipelines.ContextData; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + /// + /// Provides context necessary for the execution of a pipeline. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public class PhaseExecutionContext : GraphExecutionContext + { + public PhaseExecutionContext( + StageInstance stage = null, + PhaseInstance phase = null, + DictionaryContextData data = null, + EvaluationOptions expressionOptions = null, + ExecutionOptions executionOptions = null) + : this(stage, phase, PipelineState.InProgress, data, + new CounterStore(), new PackageStore(), new ResourceStore(), new TaskStore(), + null, null, null, expressionOptions, executionOptions) + { + } + + /// + /// Initializes a new PipelineExecutionContext instance using the specified task store. 
+ /// + /// The store which should be utilized for task reference resolution + /// The additional pre-defined resources which should be utilized for resource resolution, like: Container + public PhaseExecutionContext( + StageInstance stage, + PhaseInstance phase, + PipelineState state, + DictionaryContextData data, + ICounterStore counterStore, + IPackageStore packageStore, + IResourceStore resourceStore, + ITaskStore taskStore, + IList stepProviders, + IPipelineIdGenerator idGenerator, + IPipelineTraceWriter trace, + EvaluationOptions expressionOptions, + ExecutionOptions executionOptions) + : base(phase, state, data, counterStore, packageStore, resourceStore, taskStore, stepProviders, idGenerator, trace, expressionOptions, executionOptions) + { + this.Stage = stage; + if (this.Stage != null) + { + this.Stage.Identifier = this.IdGenerator.GetStageIdentifier(this.Stage.Name); + } + + // Set the full identifier according to the current context + this.Phase.Identifier = this.IdGenerator.GetPhaseIdentifier(this.Stage?.Name, this.Phase.Name); + } + + /// + /// The current stage which is being executed. + /// + public StageInstance Stage + { + get; + } + + /// + /// The current phase which is being executed. + /// + public PhaseInstance Phase + { + get + { + return base.Node; + } + } + + /// + /// Gets the previous attempt of the phase if this is a retry of a job which has already executed. + /// + public PhaseAttempt PreviousAttempt + { + get; + set; + } + + public JobExecutionContext CreateJobContext( + String name, + Int32 attempt, + Int32 positionInPhase = default, + Int32 totalJobsInPhase = default) + { + return CreateJobContext( + new JobInstance(name, attempt), + positionInPhase, + totalJobsInPhase); + } + + public JobExecutionContext CreateJobContext( + JobInstance jobInstance, + Int32 positionInPhase = default, + Int32 totalJobsInPhase = default) + { + return new JobExecutionContext( + context: this, + job: jobInstance, + variables: null, + positionInPhase: positionInPhase, + totalJobsInPhase: totalJobsInPhase); + } + + internal override String GetInstanceName() + { + return this.IdGenerator.GetPhaseInstanceName(this.Stage?.Name, this.Phase.Name, this.Phase.Attempt); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/PhaseInstance.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/PhaseInstance.cs new file mode 100644 index 00000000000..7e13aaea300 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/PhaseInstance.cs @@ -0,0 +1,61 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class PhaseInstance : GraphNodeInstance + { + public PhaseInstance() + { + } + + public PhaseInstance(String name) + : this(name, TaskResult.Succeeded) + { + } + + public PhaseInstance( + String name, + Int32 attempt) + : this(name, attempt, null, TaskResult.Succeeded) + { + } + + public PhaseInstance(PhaseNode phase) + : this(phase, 1) + { + } + + public PhaseInstance( + PhaseNode phase, + Int32 attempt) + : this(phase.Name, attempt, phase, TaskResult.Succeeded) + { + } + + public PhaseInstance( + String name, + TaskResult result) + : this(name, 1, null, result) + { + } + + public PhaseInstance( + String name, + Int32 attempt, + PhaseNode definition, + TaskResult result) + : base(name, attempt, definition, result) + { + } + + public static implicit operator PhaseInstance(String name) + { + 
return new PhaseInstance(name); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/PipelineAttemptBuilder.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/PipelineAttemptBuilder.cs new file mode 100644 index 00000000000..7cf66fa9366 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/PipelineAttemptBuilder.cs @@ -0,0 +1,632 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.Pipelines.Validation; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + /// + /// Provides functionality to build structured data from the timeline store. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public class PipelineAttemptBuilder + { + public PipelineAttemptBuilder( + IPipelineIdGenerator idGenerator, + PipelineProcess pipeline, + params Timeline[] timelines) + { + ArgumentUtility.CheckForNull(idGenerator, nameof(idGenerator)); + ArgumentUtility.CheckForNull(pipeline, nameof(pipeline)); + + this.Pipeline = pipeline; + this.IdGenerator = idGenerator; + m_recordsById = new Dictionary(); + m_recordsByParent = new Dictionary>(); + m_stages = new Dictionary>(StringComparer.OrdinalIgnoreCase); + + if (timelines?.Length > 0) + { + foreach (var timeline in timelines) + { + AddStageAttempts(timeline, m_stages); + } + } + } + + /// + /// Gets the ID generator for this pipeline. + /// + public IPipelineIdGenerator IdGenerator + { + get; + } + + /// + /// Gets the pipeline document. + /// + public PipelineProcess Pipeline + { + get; + } + + /// + /// Creates the initial stage attempts for a brand new pipeline. + /// + /// A list of initial attempts which should be run + public IList Initialize() + { + var initialAttempts = new List(); + foreach (var stage in this.Pipeline.Stages) + { + initialAttempts.Add(CreateAttempt(stage)); + } + return initialAttempts; + } + + /// + /// Produce list of stage attempts needed to retry a pipeline. + /// By default, we will reuse previously successful stage attempts, and produce new attempts for + /// failed stages, and any stages downstream from a failed stage. + /// If specific stage names are provided, only these stages and their descendents will be retried, + /// and will be retried irrespective of previous state. + /// + /// tuple of all attempts (the full list of attempts to be added to the plan) and "new attempts" (the retries) + public Tuple, IList> Retry(IList stageNames = null) + { + var allAttempts = new List(); + var newAttempts = new List(); + var stagesToRetry = new HashSet(StringComparer.OrdinalIgnoreCase); + + GraphValidator.Traverse(this.Pipeline.Stages, (stage, dependencies) => + { + var previousAttempt = GetStageAttempt(stage.Name); + if (previousAttempt == null) + { + // nothing to retry + return; + } + + // collect some data + var directlyTargeted = stageNames?.Contains(stage.Name, StringComparer.OrdinalIgnoreCase) is true; + var needsRetry = NeedsRetry(previousAttempt.Stage.Result); + var dependencyNeedsRetry = dependencies.Any(x => stagesToRetry.Contains(x)); + + // create new attempt + var newAttempt = default(StageAttempt); + if (dependencyNeedsRetry + || (stageNames == default && needsRetry) + || (stageNames != default && directlyTargeted)) + { + // try to create new attempt, if it comes back null, no work needs to be done + // force a retry if the stage is directly targeted but the previous attempt was successful. 
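+ // Note (summary added for clarity): a new attempt is created when an upstream dependency is being retried, + // when no explicit stage list was given and the previous result needs a retry (Failed, Canceled or Abandoned per NeedsRetry), + // or when the stage was explicitly named; otherwise the previous attempt is reused as-is.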
+ newAttempt = CreateAttempt( + stage, + previousAttempt, + forceRetry: (directlyTargeted && !needsRetry) || dependencyNeedsRetry); + } + + // update return lists + if (newAttempt == default) + { + // use previous attempt + allAttempts.Add(previousAttempt); + } + else + { + stagesToRetry.Add(previousAttempt.Stage.Name); + allAttempts.Add(newAttempt); + newAttempts.Add(newAttempt); + } + }); + + return Tuple.Create( + allAttempts as IList, + newAttempts as IList); + } + + /// + /// Create a new stage attempt and a new timeline. + /// The new timeline should contain the Pending entries for any stages, phases and jobs that need to be retried. + /// It should contain a full, re-parented, copy of the timeline subgraphs for stages, phases, and jobs that do not need to be retried. + /// + private StageAttempt CreateAttempt( + Stage stage, + StageAttempt previousStageAttempt = null, + Boolean forceRetry = false) + { + // new instance will have attempt number previous + 1 + var newStageAttempt = new StageAttempt + { + Stage = new StageInstance(stage, previousStageAttempt?.Stage.Attempt + 1 ?? 1), + Timeline = new Timeline(), + }; + + // Compute the stage ID for this attempt + var stageIdentifier = this.IdGenerator.GetStageIdentifier(newStageAttempt.Stage.Name); + var stageId = this.IdGenerator.GetStageInstanceId(newStageAttempt.Stage.Name, newStageAttempt.Stage.Attempt); + newStageAttempt.Timeline.Id = stageId; + newStageAttempt.Stage.Identifier = stageIdentifier; + + if (previousStageAttempt != null) + { + // copy the previous timeline record, reset to "Pending" state + var previousRecord = m_recordsById[this.IdGenerator.GetStageInstanceId(previousStageAttempt.Stage.Name, previousStageAttempt.Stage.Attempt)]; + newStageAttempt.Timeline.Records.Add(ResetRecord(previousRecord, null, stageId, newStageAttempt.Stage.Attempt)); + } + else + { + // create a new stage record + newStageAttempt.Timeline.Records.Add(CreateRecord(newStageAttempt.Stage, null, stageId, stage.DisplayName ?? stage.Name, nameof(Stage), m_stageOrder++, stageIdentifier)); + } + + // walk the phases. + // if a phase does not need to be retried, copy its entire timeline subgraph to the new timeline. + var phaseOrder = 1; + var phasesRetried = false; + var phasesToRetry = new HashSet(StringComparer.OrdinalIgnoreCase); + GraphValidator.Traverse(stage.Phases, (phase, dependencies) => + { + var shouldRetry = false; + var previousPhaseAttempt = previousStageAttempt?.Phases.FirstOrDefault(x => String.Equals(x.Phase.Name, phase.Name, StringComparison.OrdinalIgnoreCase)); + var upstreamDependencyNeedsRetry = dependencies.Any(x => phasesToRetry.Contains(x)); + var previousAttemptNeedsRetry = NeedsRetry(previousPhaseAttempt?.Phase.Result); + if (forceRetry || upstreamDependencyNeedsRetry || previousAttemptNeedsRetry) + { + // If the previous attempt a specific phase failed then we should retry it and everything + // downstream regardless of first attempt status. The failed phases are appended as we walk + // the graph and the set is inspected + shouldRetry = true; + phasesToRetry.Add(phase.Name); + } + + if (!shouldRetry && previousPhaseAttempt != null) + { + // This phase does not need to be retried. + // Copy timeline records from previous timeline to new timeline. + // The new timeline should report that this phase has already been run, and the parent should be the new stage. + previousPhaseAttempt.Phase.Definition = phase; + newStageAttempt.Phases.Add(previousPhaseAttempt); + + // clone so as not to mess up our lookup table. 
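+ // Note (added for clarity): m_recordsById still backs lookups for earlier attempts, so the record is cloned + // before its ParentId is redirected to the new stage below.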
+ var previousPhaseId = this.IdGenerator.GetPhaseInstanceId(newStageAttempt.Stage.Name, previousPhaseAttempt.Phase.Name, previousPhaseAttempt.Phase.Attempt); + var newPhaseRecord = m_recordsById[previousPhaseId].Clone(); + newPhaseRecord.ParentId = stageId; // this phase is already completed for the new stage. + + phaseOrder = (newPhaseRecord.Order ?? phaseOrder) + 1; // TODO: what does this do? + newStageAttempt.Timeline.Records.Add(newPhaseRecord); + + // if there are any child records of the phase, copy them too. + // they should exist exactly as they are on the new timeline. + // Only the phase record needs to be re-parented. + newStageAttempt.Timeline.Records.AddRange(CollectAllChildren(newPhaseRecord)); + } + else + { + // This phase needs to be retried. + // Track that we are scheduling a phase for retry in this attempt + phasesRetried = true; + + // Create a new attempt record in the pending state. At runtime the job expansion logic, based on the target + // strategy, will determine what needs to be re-run and what doesn't based on the previous attempt. We don't + // make assumptions about the internals of jobs here as that is the piece the orchestrator doesn't deal with + // directly. + var newPhaseAttempt = new PhaseAttempt + { + Phase = new PhaseInstance(phase, previousPhaseAttempt?.Phase.Attempt + 1 ?? 1), + }; + + var phaseId = this.IdGenerator.GetPhaseInstanceId( + newStageAttempt.Stage.Name, + newPhaseAttempt.Phase.Name, + newPhaseAttempt.Phase.Attempt); + + newPhaseAttempt.Phase.Identifier = this.IdGenerator.GetPhaseIdentifier(newStageAttempt.Stage.Name, newPhaseAttempt.Phase.Name); + newStageAttempt.Timeline.Records.Add(CreateRecord( + newPhaseAttempt.Phase, + stageId, + phaseId, + phase.DisplayName ?? phase.Name, + nameof(Phase), + phaseOrder++, + newPhaseAttempt.Phase.Identifier)); + + // The previous attempt of this phase failed and there were no upstream failures, which means + // only the failed jobs within this phase need to be re-run. + // For this case we just locate the failed jobs and create new + // attempt records to ensure they are re-run. + if (previousAttemptNeedsRetry && !upstreamDependencyNeedsRetry) + { + foreach (var previousJobAttempt in previousPhaseAttempt.Jobs) + { + var previousJobId = this.IdGenerator.GetJobInstanceId( + newStageAttempt.Stage.Name, + newPhaseAttempt.Phase.Name, + previousJobAttempt.Job.Name, + previousJobAttempt.Job.Attempt); + + if (NeedsRetry(previousJobAttempt.Job.Result)) + { + // this job needs to be retried. + // + // NOTE: + // Phases (JobFactories) normally are expanded dynamically to produce jobs. + // The phase expansion routines allow a list of configurations to be supplied. If non-empty, the JobFactories will only + // produce Jobs with the names provided. + // + // In retry attempts, we already know the Job names that will be produced, and we only want to run a subset of them. + // We can define the subset of jobs to be "expanded" by initializing the PhaseAttempt with named JobAttempts. + // See RunPhase for more details. + var newJobAttempt = new JobAttempt + { + Job = new JobInstance(previousJobAttempt.Job.Name, previousJobAttempt.Job.Attempt + 1), + }; + newJobAttempt.Job.Identifier = this.IdGenerator.GetJobIdentifier( + newStageAttempt.Stage.Name, + newPhaseAttempt.Phase.Name, + newJobAttempt.Job.Name); + newPhaseAttempt.Jobs.Add(newJobAttempt); + + // create a new record in "Pending" state based on the previous record.
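+ // Note (added for clarity): ResetRecord, defined further below, keeps the old record's Identifier, Name, Order, RecordType and RefName, + // but assigns the new id, bumps the attempt, re-parents the record under the new phase and marks it Pending.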
+ var newJobId = this.IdGenerator.GetJobInstanceId( + newStageAttempt.Stage.Name, + newPhaseAttempt.Phase.Name, + newJobAttempt.Job.Name, + newJobAttempt.Job.Attempt); + newStageAttempt.Timeline.Records.Add(ResetRecord(m_recordsById[previousJobId], phaseId, newJobId, newJobAttempt.Job.Attempt)); + } + else + { + // this job does not need to be retried. + // deep copy the timeline subgraph to the new timeline. + // reparent the job record to the new phase id so the job looks completed on the new timeline. + var newJobRecord = m_recordsById[previousJobId].Clone(); + newJobRecord.ParentId = phaseId; + newPhaseAttempt.Jobs.Add(previousJobAttempt); + newStageAttempt.Timeline.Records.Add(newJobRecord); + newStageAttempt.Timeline.Records.AddRange(CollectAllChildren(newJobRecord)); + } + } + } + + newStageAttempt.Phases.Add(newPhaseAttempt); + } + }); + + if (!phasesRetried) + { + // The stage will remain complete so there is no reason to register a new attempt + return null; + } + + // If this is a new pipeline store that is empty we need to initialize the attempts for this stage. + if (!m_stages.TryGetValue(stage.Name, out IList attempts)) + { + attempts = new List(); + m_stages[stage.Name] = attempts; + } + + attempts.Add(newStageAttempt); + return newStageAttempt; + } + + public StageAttempt GetStageAttempt( + String name, + Int32 attempt = -1) + { + if (!m_stages.TryGetValue(name, out var attempts)) + { + return null; + } + + if (attempt <= 0) + { + return attempts.OrderByDescending(x => x.Stage.Attempt).FirstOrDefault(); + } + else + { + return attempts.FirstOrDefault(x => x.Stage.Attempt == attempt); + } + } + + /// + /// returns true if result should be retried. + /// + internal static Boolean NeedsRetry(TaskResult? result) + { + return result == TaskResult.Abandoned + || result == TaskResult.Canceled + || result == TaskResult.Failed; + } + + private TimelineRecord CreateRecord( + IGraphNodeInstance node, + Guid? parentId, + Guid recordId, + String name, + String type, + Int32 order, + String identifier) + { + return new TimelineRecord + { + Attempt = node.Attempt, + Id = recordId, + Identifier = identifier, + Name = name, + Order = order, + ParentId = parentId, + RecordType = type, + RefName = node.Name, + State = TimelineRecordState.Pending, + }; + } + + /// + /// creates a new timeline record with Pending state based on the input. + /// + private TimelineRecord ResetRecord( + TimelineRecord record, + Guid? 
parentId, + Guid newId, + Int32 attempt) + { + return new TimelineRecord + { + // new stuff + Attempt = attempt, + Id = newId, + ParentId = parentId, + State = TimelineRecordState.Pending, + + // old stuff + Identifier = record.Identifier, + Name = record.Name, + Order = record.Order, + RecordType = record.RecordType, + RefName = record.RefName, + }; + } + + /// + /// Returns tuple of recordsById, recordsByParentId + /// + internal static Tuple, IDictionary>> ParseTimeline(Timeline timeline) + { + var recordsById = new Dictionary(); + var recordsByParentId = new Dictionary>(); + + foreach (var record in timeline?.Records) + { + recordsById[record.Id] = record; + + if (record.ParentId != null) + { + if (!recordsByParentId.TryGetValue(record.ParentId.Value, out var childRecords)) + { + childRecords = new List(); + recordsByParentId.Add(record.ParentId.Value, childRecords); + } + + childRecords.Add(record); + } + else if (record.RecordType == nameof(Stage)) + { + FixRecord(record); + } + } + + return Tuple.Create( + recordsById as IDictionary, + recordsByParentId as IDictionary>); + } + + private void AddStageAttempts( + Timeline timeline, + IDictionary> attempts) + { + if (timeline == default) + { + return; // nothing to do + } + + // parse timeline + var tuple = ParseTimeline(timeline); + m_recordsById = tuple.Item1; + m_recordsByParent = tuple.Item2; + + foreach (var stageRecord in m_recordsById.Values.Where(x => x.RecordType == "Stage")) + { + var attempt = new StageAttempt + { + Stage = new StageInstance + { + Attempt = stageRecord.Attempt, + FinishTime = stageRecord.FinishTime, + Identifier = stageRecord.Identifier, + Name = stageRecord.RefName, + Result = stageRecord.Result, + StartTime = stageRecord.StartTime, + State = Convert(stageRecord.State.Value), + }, + Timeline = new Timeline + { + Id = timeline.Id, + }, + }; + + attempt.Timeline.Records.Add(stageRecord); + + if (m_recordsByParent.TryGetValue(stageRecord.Id, out var phaseRecords)) + { + AddPhaseAttempts( + attempt, + phaseRecords.Where(x => x.RecordType == nameof(Phase)), + m_recordsByParent); + } + + if (!attempts.TryGetValue(attempt.Stage.Identifier, out var stageAttempts)) + { + stageAttempts = new List(); + attempts.Add(attempt.Stage.Identifier, stageAttempts); + } + + stageAttempts.Add(attempt); + } + } + + private void AddPhaseAttempts( + StageAttempt stageAttempt, + IEnumerable phaseRecords, + IDictionary> recordsByParent) + { + foreach (var phaseRecord in phaseRecords) + { + FixRecord(phaseRecord); + + var phaseAttempt = new PhaseAttempt + { + Phase = new PhaseInstance + { + Attempt = phaseRecord.Attempt, + FinishTime = phaseRecord.FinishTime, + Identifier = phaseRecord.Identifier, + Name = phaseRecord.RefName, + Result = phaseRecord.Result, + StartTime = phaseRecord.StartTime, + State = Convert(phaseRecord.State.Value), + }, + }; + + stageAttempt.Phases.Add(phaseAttempt); + stageAttempt.Timeline.Records.Add(phaseRecord); + + // Drive down into the individual jobs if they exist + if (recordsByParent.TryGetValue(phaseRecord.Id, out var jobRecords)) + { + AddJobAttempts( + stageAttempt, + phaseAttempt, + jobRecords.Where(x => x.RecordType == nameof(Job)), + recordsByParent); + } + } + } + + private void AddJobAttempts( + StageAttempt stageAttempt, + PhaseAttempt phaseAttempt, + IEnumerable jobRecords, + IDictionary> recordsByParent) + { + foreach (var jobRecord in jobRecords) + { + FixRecord(jobRecord); + + var jobAttempt = new JobAttempt + { + Job = new JobInstance + { + Attempt = jobRecord.Attempt, + FinishTime = 
jobRecord.FinishTime, + Identifier = jobRecord.Identifier, + Name = jobRecord.RefName, + Result = jobRecord.Result, + StartTime = jobRecord.StartTime, + State = Convert(jobRecord.State.Value), + }, + }; + + phaseAttempt.Jobs.Add(jobAttempt); + stageAttempt.Timeline.Records.Add(jobRecord); + + // Just blindly copy the child records + stageAttempt.Timeline.Records.AddRange(CollectAllChildren(jobRecord)); + } + } + + internal IList CollectAllChildren( + TimelineRecord root, + Int32 maxDepth = int.MaxValue) + { + return CollectAllChildren(root, m_recordsByParent, maxDepth); + } + + internal static IList CollectAllChildren( + TimelineRecord root, + IDictionary> recordsByParent, + Int32 maxDepth = int.MaxValue) + { + var result = new List(); + if (!recordsByParent.TryGetValue(root.Id, out var childRecords)) + { + return result; + } + + // instead of actually recursing, create a queue of record, depth pairs. + var recordQueue = new Queue>(childRecords.Select(x => Tuple.Create(x, 1))); + while (recordQueue.Count > 0) + { + var t = recordQueue.Dequeue(); + var currentRecord = t.Item1; + var currentDepth = t.Item2; + + // collect record + result.Add(currentRecord); + + // check depth + if (currentDepth >= maxDepth) + { + continue; + } + + // enqueue children + var childDepth = currentDepth + 1; + if (recordsByParent.TryGetValue(currentRecord.Id, out var newChildren)) + { + foreach (var newChild in newChildren) + { + recordQueue.Enqueue(Tuple.Create(newChild, childDepth)); + } + } + } + + return result; + } + + private static PipelineState Convert(TimelineRecordState state) + { + switch (state) + { + case TimelineRecordState.Completed: + return PipelineState.Completed; + case TimelineRecordState.InProgress: + return PipelineState.InProgress; + } + + return PipelineState.NotStarted; + } + + /// + /// The timeline records get normalized into strings which are not case-sensitive, meaning the input + /// casing may not match what is output.In order to compensate for this we update the ref name from + /// the identifier, as the identifier reflects the actual value. 
+ /// + private static void FixRecord(TimelineRecord record) + { + if (!String.IsNullOrEmpty(record.Identifier)) + { + record.RefName = PipelineUtilities.GetName(record.Identifier); + } + } + + // Includes all attempts of all stages + private Int32 m_stageOrder = 1; + private IDictionary m_recordsById; + private IDictionary> m_recordsByParent; + private IDictionary> m_stages; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/PipelineExecutionContext.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/PipelineExecutionContext.cs new file mode 100644 index 00000000000..f8133bc789b --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/PipelineExecutionContext.cs @@ -0,0 +1,68 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Pipelines.ContextData; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class PipelineExecutionContext : PipelineContextBase + { + private protected PipelineExecutionContext(PipelineExecutionContext context) + : base(context) + { + this.State = context.State; + this.ExecutionOptions = context.ExecutionOptions; + } + + private protected PipelineExecutionContext( + DictionaryContextData data, + ICounterStore counterStore, + IPackageStore packageStore, + IResourceStore resourceStore, + ITaskStore taskStore, + IList stepProviders, + PipelineState state, + IPipelineIdGenerator idGenerator = null, + IPipelineTraceWriter trace = null, + EvaluationOptions expressionOptions = null, + ExecutionOptions executionOptions = null) + : base(data, counterStore, packageStore, resourceStore, taskStore, stepProviders, idGenerator, trace, expressionOptions) + { + this.State = state; + this.ExecutionOptions = executionOptions ?? new ExecutionOptions(); + } + + /// + /// Gets the current state of the pipeline. + /// + public PipelineState State + { + get; + } + + /// + /// Gets the execution options used for the pipeline. + /// + public ExecutionOptions ExecutionOptions + { + get; + } + + /// + /// Gets the instance ID for the current context. + /// + /// + internal Guid GetInstanceId() + { + return this.IdGenerator.GetInstanceId(this.GetInstanceName()); + } + + /// + /// When overridden in a derived class, gets the instance name using . 
+ /// + /// The instance name according to the associated generator + internal abstract String GetInstanceName(); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/StageAttempt.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/StageAttempt.cs new file mode 100644 index 00000000000..cbffa1ee75c --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/StageAttempt.cs @@ -0,0 +1,40 @@ +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class StageAttempt + { + internal StageAttempt() + { + } + + public StageInstance Stage + { + get; + set; + } + + public IList Phases + { + get + { + if (m_phases == null) + { + m_phases = new List(); + } + return m_phases; + } + } + + public Timeline Timeline + { + get; + internal set; + } + + private List m_phases; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/StageExecutionContext.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/StageExecutionContext.cs new file mode 100644 index 00000000000..34a7f00fb4c --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/StageExecutionContext.cs @@ -0,0 +1,63 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Pipelines.ContextData; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + /// + /// Provides context necessary for the execution of a pipeline. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public class StageExecutionContext : GraphExecutionContext + { + public StageExecutionContext( + StageInstance stage = default, + DictionaryContextData data = null) + : this(stage, PipelineState.InProgress, data, new CounterStore(), new PackageStore(), new ResourceStore(), new TaskStore(), null, null, null, null, null) + { + } + + /// + /// Initializes a new StageExecutionContext instance using the specified stage and services. + /// + /// The store which should be utilized for task reference resolution + /// The additional pre-defined resources which should be utilized for resource resolution, like: Container + public StageExecutionContext( + StageInstance stage, + PipelineState state, + DictionaryContextData data, + ICounterStore counterStore, + IPackageStore packageStore, + IResourceStore resourceStore, + ITaskStore taskStore, + IList stepProviders, + IPipelineIdGenerator idGenerator, + IPipelineTraceWriter trace, + EvaluationOptions expressionOptions, + ExecutionOptions executionOptions) + : base(stage, state, data, counterStore, packageStore, resourceStore, taskStore, stepProviders, idGenerator, trace, expressionOptions, executionOptions) + { + this.Stage.Identifier = this.IdGenerator.GetStageIdentifier(stage.Name); + } + + /// + /// The current stage which is being executed. + /// + public StageInstance Stage => this.Node; + + /// + /// Gets the previous attempt of the stage if this is a retry of a job which has already executed. 
+ /// + public StageAttempt PreviousAttempt + { + get; + } + + internal override String GetInstanceName() + { + return this.IdGenerator.GetStageInstanceName(this.Stage.Name, this.Stage.Attempt); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Runtime/StageInstance.cs b/src/Sdk/DTPipelines/Pipelines/Runtime/StageInstance.cs new file mode 100644 index 00000000000..93ef248c595 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Runtime/StageInstance.cs @@ -0,0 +1,61 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines.Runtime +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class StageInstance : GraphNodeInstance + { + public StageInstance() + { + } + + public StageInstance(String name) + : this(name, TaskResult.Succeeded) + { + } + + public StageInstance( + String name, + Int32 attempt) + : this(name, attempt, null, TaskResult.Succeeded) + { + } + + public StageInstance(Stage stage) + : this(stage, 1) + { + } + + public StageInstance( + Stage stage, + Int32 attempt) + : this(stage.Name, attempt, stage, TaskResult.Succeeded) + { + } + + public StageInstance( + String name, + TaskResult result) + : this(name, 1, null, result) + { + } + + public StageInstance( + String name, + Int32 attempt, + Stage definition, + TaskResult result) + : base(name, attempt, definition, result) + { + } + + public static implicit operator StageInstance(String name) + { + return new StageInstance(name); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/SecretStoreConfiguration.cs b/src/Sdk/DTPipelines/Pipelines/SecretStoreConfiguration.cs new file mode 100644 index 00000000000..544b062a96a --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/SecretStoreConfiguration.cs @@ -0,0 +1,91 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class SecretStoreConfiguration + { + public SecretStoreConfiguration() + { + } + + private SecretStoreConfiguration(SecretStoreConfiguration configurationToCopy) + { + this.Endpoint = configurationToCopy.Endpoint?.Clone(); + this.StoreName = configurationToCopy.StoreName; + + m_endpointId = configurationToCopy.m_endpointId; + if (configurationToCopy.m_keys?.Count > 0) + { + m_keys = new List(configurationToCopy.m_keys); + } + } + + [DataMember(EmitDefaultValue = false)] + public ServiceEndpointReference Endpoint + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String StoreName + { + get; + set; + } + + public IList Keys + { + get + { + if (m_keys == null) + { + m_keys = new List(); + } + return m_keys; + } + } + + public SecretStoreConfiguration Clone() + { + return new SecretStoreConfiguration(this); + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + if (this.Endpoint == null && m_endpointId != Guid.Empty) + { + this.Endpoint = new ServiceEndpointReference + { + Id = m_endpointId, + }; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_keys?.Count == 0) + { + m_keys = null; + } + + if (this.Endpoint != null && this.Endpoint.Id != Guid.Empty) + { + m_endpointId = this.Endpoint.Id; + } + } + + [DataMember(Name = "EndpointId", EmitDefaultValue = false)] + private Guid m_endpointId; + + [DataMember(Name = "Keys", EmitDefaultValue = false)] + 
private List m_keys; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/SecureFileReference.cs b/src/Sdk/DTPipelines/Pipelines/SecureFileReference.cs new file mode 100644 index 00000000000..4226c9d18cd --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/SecureFileReference.cs @@ -0,0 +1,38 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class SecureFileReference : ResourceReference + { + public SecureFileReference() + { + } + + private SecureFileReference(SecureFileReference referenceToCopy) + : base(referenceToCopy) + { + this.Id = referenceToCopy.Id; + } + + [DataMember(EmitDefaultValue = false)] + public Guid Id + { + get; + set; + } + + public SecureFileReference Clone() + { + return new SecureFileReference(this); + } + + public override String ToString() + { + return base.ToString() ?? this.Id.ToString("D"); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/SecureFileStore.cs b/src/Sdk/DTPipelines/Pipelines/SecureFileStore.cs new file mode 100644 index 00000000000..f2448ec0cda --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/SecureFileStore.cs @@ -0,0 +1,105 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class SecureFileStore : ISecureFileStore + { + public SecureFileStore( + IList files, + ISecureFileResolver resolver = null) + { + this.Resolver = resolver; + Add(files?.ToArray()); + } + + /// + /// Get the endpoint resolver configured for this store. + /// + public ISecureFileResolver Resolver + { + get; + } + + public IList GetAuthorizedReferences() + { + return m_resourcesById.Values.Select(x => new SecureFileReference { Id = x.Id }).ToList(); + } + + public SecureFile Get(SecureFileReference reference) + { + if (reference == null) + { + return null; + } + + var referenceId = reference.Id; + var referenceName = reference.Name?.Literal; + if (referenceId == Guid.Empty && String.IsNullOrEmpty(referenceName)) + { + return null; + } + + SecureFile authorizedResource = null; + if (referenceId != Guid.Empty) + { + if (m_resourcesById.TryGetValue(referenceId, out authorizedResource)) + { + return authorizedResource; + } + } + else if (!String.IsNullOrEmpty(referenceName)) + { + if (m_resourcesByName.TryGetValue(referenceName, out List matchingResources)) + { + if (matchingResources.Count > 1) + { + throw new AmbiguousResourceSpecificationException(PipelineStrings.AmbiguousServiceEndpointSpecification(referenceId)); + } + + return matchingResources[0]; + } + } + + // If we have an authorizer then attempt to authorize the reference for use + authorizedResource = this.Resolver?.Resolve(reference); + if (authorizedResource != null) + { + Add(authorizedResource); + } + + return authorizedResource; + } + + private void Add(params SecureFile[] resources) + { + if (resources?.Length > 0) + { + foreach (var resource in resources) + { + if (m_resourcesById.TryGetValue(resource.Id, out _)) + { + continue; + } + + m_resourcesById.Add(resource.Id, resource); + + if (!m_resourcesByName.TryGetValue(resource.Name, out List resourcesByName)) + { + resourcesByName = new List(); + m_resourcesByName.Add(resource.Name, resourcesByName); + } + + resourcesByName.Add(resource); + } + } + } + + private readonly Dictionary m_resourcesById = new 
Dictionary(); + private readonly Dictionary> m_resourcesByName = new Dictionary>(StringComparer.OrdinalIgnoreCase); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ServerTarget.cs b/src/Sdk/DTPipelines/Pipelines/ServerTarget.cs new file mode 100644 index 00000000000..f8008352f55 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ServerTarget.cs @@ -0,0 +1,92 @@ +using System; +using System.ComponentModel; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class ServerTarget : PhaseTarget + { + public ServerTarget() + : base(PhaseTargetType.Server) + { + } + + private ServerTarget(ServerTarget targetToClone) + : base(targetToClone) + { + this.Execution = targetToClone.Execution?.Clone(); + } + + [DataMember(EmitDefaultValue = false)] + public ParallelExecutionOptions Execution + { + get; + set; + } + + public override PhaseTarget Clone() + { + return new ServerTarget(this); + } + + public override Boolean IsValid(TaskDefinition task) + { + return task.RunsOn.Contains(TaskRunsOnConstants.RunsOnServer, StringComparer.OrdinalIgnoreCase); + } + + internal override JobExecutionContext CreateJobContext( + PhaseExecutionContext context, + String jobName, + Int32 attempt, + Boolean continueOnError, + Int32 timeoutInMinutes, + Int32 cancelTimeoutInMinutes, + IJobFactory jobFactory) + { + context.Trace?.EnterProperty("CreateJobContext"); + var e = this.Execution ?? new ParallelExecutionOptions(); + var jobContext = e.CreateJobContext( + context, + jobName, + attempt, + null, + null, + continueOnError, + timeoutInMinutes, + cancelTimeoutInMinutes, + jobFactory); + context.Trace?.LeaveProperty("CreateJobContext"); + + jobContext.Variables[WellKnownDistributedTaskVariables.EnableAccessToken] = Boolean.TrueString; + return jobContext; + } + + internal override ExpandPhaseResult Expand( + PhaseExecutionContext context, + Boolean continueOnError, + Int32 timeoutInMinutes, + Int32 cancelTimeoutInMinutes, + IJobFactory jobFactory, + JobExpansionOptions options) + { + context.Trace?.EnterProperty("Expand"); + var execution = this.Execution ?? 
new ParallelExecutionOptions(); + var result = execution.Expand( + context: context, + container: null, + sidecarContainers: null, + continueOnError: continueOnError, + timeoutInMinutes: timeoutInMinutes, + cancelTimeoutInMinutes: cancelTimeoutInMinutes, + jobFactory: jobFactory, + options: options); + context.Trace?.LeaveProperty("Expand"); + return result; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ServiceEndpointReference.cs b/src/Sdk/DTPipelines/Pipelines/ServiceEndpointReference.cs new file mode 100644 index 00000000000..0d066778ecd --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ServiceEndpointReference.cs @@ -0,0 +1,38 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ServiceEndpointReference : ResourceReference + { + public ServiceEndpointReference() + { + } + + private ServiceEndpointReference(ServiceEndpointReference referenceToCopy) + : base(referenceToCopy) + { + this.Id = referenceToCopy.Id; + } + + [DataMember(EmitDefaultValue = false)] + public Guid Id + { + get; + set; + } + + public ServiceEndpointReference Clone() + { + return new ServiceEndpointReference(this); + } + + public override String ToString() + { + return base.ToString() ?? this.Id.ToString("D"); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/ServiceEndpointStore.cs b/src/Sdk/DTPipelines/Pipelines/ServiceEndpointStore.cs new file mode 100644 index 00000000000..80cf530fdf4 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/ServiceEndpointStore.cs @@ -0,0 +1,118 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class ServiceEndpointStore : IServiceEndpointStore + { + public ServiceEndpointStore( + IList endpoints, + IServiceEndpointResolver resolver = null, + Boolean lazyLoadEndpoints = false) + { + this.Resolver = resolver; + this.LazyLoadEndpoints = lazyLoadEndpoints; + Add(endpoints?.ToArray()); + } + + /// + /// Get the endpoint resolver configured for this store. + /// + public IServiceEndpointResolver Resolver + { + get; + } + + public IList GetAuthorizedReferences() + { + if (LazyLoadEndpoints) + { + return this.Resolver != null ? 
this.Resolver.GetAuthorizedReferences() + : new List(); + } + + return m_endpointsById.Values.Select(x => new ServiceEndpointReference { Id = x.Id, Name = x.Name }).ToList(); + } + + public void Authorize(ServiceEndpointReference reference) + { + this.Resolver?.Authorize(reference); + } + + public ServiceEndpoint Get(ServiceEndpointReference reference) + { + if (reference == null) + { + return null; + } + + var referenceId = reference.Id; + var referenceName = reference.Name?.Literal; + if (referenceId == Guid.Empty && String.IsNullOrEmpty(referenceName)) + { + return null; + } + + ServiceEndpoint authorizedEndpoint = null; + if (referenceId != Guid.Empty) + { + if (m_endpointsById.TryGetValue(referenceId, out authorizedEndpoint)) + { + return authorizedEndpoint; + } + } + else if (!String.IsNullOrEmpty(referenceName)) + { + if (m_endpointsByName.TryGetValue(referenceName, out List matchingEndpoints)) + { + if (matchingEndpoints.Count > 1) + { + throw new AmbiguousResourceSpecificationException(PipelineStrings.AmbiguousServiceEndpointSpecification(referenceId)); + } + + return matchingEndpoints[0]; + } + } + + authorizedEndpoint = this.Resolver?.Resolve(reference); + if (authorizedEndpoint != null) + { + Add(authorizedEndpoint); + } + + return authorizedEndpoint; + } + + private void Add(params ServiceEndpoint[] endpoints) + { + if (endpoints?.Length > 0) + { + foreach (var endpoint in endpoints) + { + if (m_endpointsById.TryGetValue(endpoint.Id, out _)) + { + continue; + } + + m_endpointsById.Add(endpoint.Id, endpoint); + + if (!m_endpointsByName.TryGetValue(endpoint.Name, out List endpointsByName)) + { + endpointsByName = new List(); + m_endpointsByName.Add(endpoint.Name, endpointsByName); + } + + endpointsByName.Add(endpoint); + } + } + } + + private readonly Dictionary m_endpointsById = new Dictionary(); + private readonly Dictionary> m_endpointsByName = new Dictionary>(StringComparer.OrdinalIgnoreCase); + private readonly Boolean LazyLoadEndpoints; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Stage.cs b/src/Sdk/DTPipelines/Pipelines/Stage.cs new file mode 100644 index 00000000000..c94aab45c5d --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Stage.cs @@ -0,0 +1,208 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines.Validation; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class Stage : IGraphNode + { + public Stage() + { + } + + public Stage( + String name, + IList phases) + { + this.Name = name; + + if (phases?.Count > 0) + { + m_phases = new List(phases); + } + } + + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String DisplayName + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Condition + { + get; + set; + } + + public IList Variables + { + get + { + if (m_variables == null) + { + m_variables = new List(); + } + return m_variables; + } + } + + public IList Phases + { + get + { + if (m_phases == null) + { + m_phases = new List(); + } + return m_phases; + } + } + + public ISet DependsOn + { + get + { + if (m_dependsOn == null) + { + m_dependsOn = new HashSet(StringComparer.OrdinalIgnoreCase); + } + return m_dependsOn; + } + } + + void IGraphNode.Validate( + PipelineBuildContext context, + ValidationResult result) + { + // Default the condition to something reasonable if none is 
specified + if (String.IsNullOrEmpty(this.Condition)) + { + this.Condition = StageCondition.Default; + } + else + { + // Simply construct the condition and make sure the syntax and functions used are valid + var parsedCondition = new StageCondition(this.Condition); + } + + if (m_variables?.Count > 0) + { + var variablesCopy = new List(); + foreach (var variable in this.Variables) + { + if (variable is Variable simpleVariable) + { + // Do not allow phase overrides for certain variables. + if (Phase.s_nonOverridableVariables.Contains(simpleVariable.Name)) + { + continue; + } + } + else if (variable is VariableGroupReference groupVariable) + { + if (context.EnvironmentVersion < 2) + { + result.Errors.Add(new PipelineValidationError(PipelineStrings.StageVariableGroupNotSupported(this.Name, groupVariable))); + continue; + } + + result.ReferencedResources.VariableGroups.Add(groupVariable); + + if (context.BuildOptions.ValidateResources) + { + var variableGroup = context.ResourceStore.VariableGroups.Get(groupVariable); + if (variableGroup == null) + { + result.UnauthorizedResources.VariableGroups.Add(groupVariable); + result.Errors.Add(new PipelineValidationError(PipelineStrings.VariableGroupNotFoundForStage(this.Name, groupVariable))); + } + } + } + + variablesCopy.Add(variable); + } + + m_variables.Clear(); + m_variables.AddRange(variablesCopy); + } + + GraphValidator.Validate(context, result, PipelineStrings.JobNameWhenNoNameIsProvided, this.Name, this.Phases, Phase.GetErrorMessage); + } + + internal static String GetErrorMessage( + String code, + params Object[] values) + { + switch (code) + { + case PipelineConstants.NameInvalid: + // values[0] is the graph name which is null during stage graph validation + // values[1] is the stage name + return PipelineStrings.StageNameInvalid(values[1]); + + case PipelineConstants.NameNotUnique: + // values[0] is the graph name which is null during stage graph validation + // values[1] is the stage name + return PipelineStrings.StageNamesMustBeUnique(values[1]); + + case PipelineConstants.StartingPointNotFound: + return PipelineStrings.PipelineNotValidNoStartingStage(); + + case PipelineConstants.DependencyNotFound: + // values[0] is the graph name which is null during stage graph validation + // values[1] is the node name + // values[2] is the dependency node name + return PipelineStrings.StageDependencyNotFound(values[1], values[2]); + + case PipelineConstants.GraphContainsCycle: + // values[0] is the graph name which is null during stage graph validation + // values[1] is the node name + // values[2] is the dependency node name + return PipelineStrings.StageGraphCycleDetected(values[1], values[2]); + } + + throw new NotSupportedException(); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_dependsOn?.Count == 0) + { + m_dependsOn = null; + } + + if (m_phases?.Count == 0) + { + m_phases = null; + } + + if (m_variables?.Count == 0) + { + m_variables = null; + } + } + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private List m_variables; + + [DataMember(Name = "Phases", EmitDefaultValue = false)] + private List m_phases; + + [DataMember(Name = "DependsOn", EmitDefaultValue = false)] + private HashSet m_dependsOn; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/StageCondition.cs b/src/Sdk/DTPipelines/Pipelines/StageCondition.cs new file mode 100644 index 00000000000..874fd9862ce --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/StageCondition.cs @@ -0,0 +1,22 @@ +using System; +using 
System.ComponentModel; +using GitHub.DistributedTask.Pipelines.Runtime; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class StageCondition : GraphCondition + { + public StageCondition(String condition) + : base(condition) + { + } + + public ConditionResult Evaluate(StageExecutionContext context) + { + var traceWriter = new ConditionTraceWriter(); + var evaluationResult = m_parsedCondition.Evaluate(traceWriter, context.SecretMasker, context, context.ExpressionOptions); + return new ConditionResult() { Value = evaluationResult.IsTruthy, Trace = traceWriter.Trace }; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Step.cs b/src/Sdk/DTPipelines/Pipelines/Step.cs new file mode 100644 index 00000000000..7fe27dca36e --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Step.cs @@ -0,0 +1,88 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [KnownType(typeof(ActionStep))] + [KnownType(typeof(TaskStep))] + [KnownType(typeof(TaskTemplateStep))] + [KnownType(typeof(GroupStep))] + [JsonConverter(typeof(StepConverter))] + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class Step + { + protected Step() + { + this.Enabled = true; + } + + protected Step(Step stepToClone) + { + this.Enabled = stepToClone.Enabled; + this.Id = stepToClone.Id; + this.Name = stepToClone.Name; + this.DisplayName = stepToClone.DisplayName; + } + + [DataMember(EmitDefaultValue = false)] + public abstract StepType Type + { + get; + } + + [DataMember(EmitDefaultValue = false)] + public Guid Id + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String DisplayName + { + get; + set; + } + + [DefaultValue(true)] + [DataMember(EmitDefaultValue = false)] + public Boolean Enabled + { + get; + set; + } + + public abstract Step Clone(); + } + + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public enum StepType + { + [DataMember] + Task = 1, + + [DataMember] + TaskTemplate = 2, + + [DataMember] + Group = 3, + + [DataMember] + Action = 4, + + [DataMember] + [Obsolete("Deprecated", false)] + Script = 5, + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/StepConverter.cs b/src/Sdk/DTPipelines/Pipelines/StepConverter.cs new file mode 100644 index 00000000000..d617d50c6f0 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/StepConverter.cs @@ -0,0 +1,101 @@ +using System; +using System.Reflection; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.Pipelines +{ + internal sealed class StepConverter : VssSecureJsonConverter + { + public override bool CanWrite + { + get + { + return false; + } + } + + public override bool CanConvert(Type objectType) + { + return typeof(Step).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + JObject value = JObject.Load(reader); + if (!value.TryGetValue("Type", StringComparison.OrdinalIgnoreCase, out JToken stepTypeValue)) + { + Step compatStepObject; + if (value.TryGetValue("Parameters", StringComparison.OrdinalIgnoreCase, out _)) + { + compatStepObject = new TaskTemplateStep(); + } + else + { + 
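+ // Back-compat sketch: a legacy payload with no "Type" discriminator and no "Parameters" property
+ // (for example { "reference": { "name": "SomeTask", "version": "1.0.0" } }, property names
+ // illustrative) falls through to this branch and is populated as a TaskStep.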
compatStepObject = new TaskStep(); + } + + using (var objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, compatStepObject); + } + + return compatStepObject; + } + else + { + StepType stepType; + if (stepTypeValue.Type == JTokenType.Integer) + { + stepType = (StepType)(Int32)stepTypeValue; + } + else if (stepTypeValue.Type != JTokenType.String || !Enum.TryParse((String)stepTypeValue, true, out stepType)) + { + return null; + } + + Step stepObject = null; + switch (stepType) + { + case StepType.Action: + stepObject = new ActionStep(); + break; + + case StepType.Group: + stepObject = new GroupStep(); + break; + + case StepType.Task: + stepObject = new TaskStep(); + break; + + case StepType.TaskTemplate: + stepObject = new TaskTemplateStep(); + break; + } + + using (var objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, stepObject); + } + + return stepObject; + } + } + + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/StrategyResult.cs b/src/Sdk/DTPipelines/Pipelines/StrategyResult.cs new file mode 100644 index 00000000000..9d7973e37d5 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/StrategyResult.cs @@ -0,0 +1,33 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines.ContextData; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class StrategyResult + { + public StrategyResult() + { + FailFast = true; + } + + public Boolean FailFast { get; set; } + + public int MaxParallel { get; set; } + + public IList Configurations { get; } = new List(); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class StrategyConfiguration + { + public String DisplayName { get; set; } + + public String Name { get; set; } + + public IDictionary ContextData { get; } = new Dictionary(StringComparer.Ordinal); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/TaskCondition.cs b/src/Sdk/DTPipelines/Pipelines/TaskCondition.cs new file mode 100644 index 00000000000..4be8abd1159 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/TaskCondition.cs @@ -0,0 +1,166 @@ +using System; +using System.ComponentModel; +using System.Linq; +using System.Text; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Expressions2.Sdk; +using GitHub.DistributedTask.Pipelines.Expressions; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class TaskCondition + { + public TaskCondition(String condition) + { + m_condition = condition ?? 
Default; + m_parser = new ExpressionParser(); + m_parsedCondition = m_parser.CreateTree(m_condition, new ConditionTraceWriter(), s_namedValueInfo, s_functionInfo); + m_requiresVariables = new Lazy(HasVariablesReference); + } + + /// + /// Gets the default condition if none is specified + /// + public static String Default + { + get + { + return "success()"; + } + } + + public Boolean RequiresVariables + { + get + { + return m_requiresVariables.Value; + } + } + + public ConditionResult Evaluate(JobExecutionContext context) + { + var traceWriter = new ConditionTraceWriter(); + var evaluationResult = m_parsedCondition.Evaluate(traceWriter, context.SecretMasker, context, context.ExpressionOptions); + return new ConditionResult() { Value = evaluationResult.IsTruthy, Trace = traceWriter.Trace }; + } + + private Boolean HasVariablesReference() + { + return false; + } + + private readonly String m_condition; + private readonly ExpressionParser m_parser; + private readonly IExpressionNode m_parsedCondition; + private readonly Lazy m_requiresVariables; + + private static readonly INamedValueInfo[] s_namedValueInfo = new INamedValueInfo[] + { + }; + + private static readonly IFunctionInfo[] s_functionInfo = new IFunctionInfo[] + { + new FunctionInfo("always", 0, 0), + new FunctionInfo("failure", 0, 0), + new FunctionInfo("cancelled", 0, 0), + new FunctionInfo("success", 0, 0), + }; + + private sealed class ConditionTraceWriter : ITraceWriter + { + public String Trace + { + get + { + return m_info.ToString(); + } + } + + public void Info(String message) + { + m_info.AppendLine(message); + } + + public void Verbose(String message) + { + // Not interested + } + + private StringBuilder m_info = new StringBuilder(); + } + + private sealed class AlwaysNode : Function + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + return true; + } + } + + private sealed class CancelledNode : Function + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var conditionContext = context.State as JobExecutionContext; + return conditionContext.State == PipelineState.Canceling; + } + } + + private sealed class FailureNode : Function + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var executionContext = context.State as JobExecutionContext; + if (executionContext.State != PipelineState.InProgress) + { + return false; + } + + // The variable should always be set into the environment for a job + if (!executionContext.Variables.TryGetValue(WellKnownDistributedTaskVariables.JobStatus, out var value) || + !Enum.TryParse(value.Value, true, out var result)) + { + return false; + } + + return result == TaskResult.Failed; + } + } + + private sealed class SuccessNode : Function + { + protected override Object EvaluateCore( + EvaluationContext context, + out ResultMemory resultMemory) + { + resultMemory = null; + var executionContext = context.State as JobExecutionContext; + if (executionContext.State != PipelineState.InProgress) + { + return false; + } + + // The variable should always be set into the environment for a job + if (!executionContext.Variables.TryGetValue(WellKnownDistributedTaskVariables.JobStatus, out var value) || + !Enum.TryParse(value.Value, true, out var result)) + { + return false; + } + + return result == TaskResult.Succeeded || result == 
TaskResult.SucceededWithIssues; + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/TaskDefinitionExtensions.cs b/src/Sdk/DTPipelines/Pipelines/TaskDefinitionExtensions.cs new file mode 100644 index 00000000000..5c68c5d97df --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/TaskDefinitionExtensions.cs @@ -0,0 +1,70 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class TaskDefinitionExtensions + { + public static String ComputeDisplayName( + this TaskDefinition taskDefinition, + IDictionary inputs) + { + if (!String.IsNullOrEmpty(taskDefinition.InstanceNameFormat)) + { + return VariableUtility.ExpandVariables(taskDefinition.InstanceNameFormat, inputs); + } + else if (!String.IsNullOrEmpty(taskDefinition.FriendlyName)) + { + return taskDefinition.FriendlyName; + } + else + { + return taskDefinition.Name; + } + } + + /// + /// Returns the maximum of the two versions: the currentMinimum and the task's MinimumAgentVersion + /// + public static String GetMinimumAgentVersion( + this TaskDefinition taskDefinition, + String currentMinimum) + { + String minimumVersion; + + // If task.minAgentVersion > currentMin, this task needs a newer agent. So, return task.minAgentVersion + if (DemandMinimumVersion.CompareVersion(taskDefinition.MinimumAgentVersion, currentMinimum) > 0) + { + minimumVersion = taskDefinition.MinimumAgentVersion; + } + else + { + minimumVersion = currentMinimum; + } + + // If any of the task execution jobs requires Node10, return the minimum agent version that supports it + if (taskDefinition.RequiresNode10() && + DemandMinimumVersion.CompareVersion(s_node10MinAgentVersion, minimumVersion) > 0) + { + minimumVersion = s_node10MinAgentVersion; + } + + return minimumVersion; + } + + private static bool RequiresNode10( + this TaskDefinition taskDefinition) + { + return taskDefinition.PreJobExecution.Keys.Contains(s_node10, StringComparer.OrdinalIgnoreCase) || + taskDefinition.Execution.Keys.Contains(s_node10, StringComparer.OrdinalIgnoreCase) || + taskDefinition.PostJobExecution.Keys.Contains(s_node10, StringComparer.OrdinalIgnoreCase); + } + + private static string s_node10MinAgentVersion = "2.144.0"; + private static string s_node10 = "Node10"; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/TaskStep.cs b/src/Sdk/DTPipelines/Pipelines/TaskStep.cs new file mode 100644 index 00000000000..61be80b0fbc --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/TaskStep.cs @@ -0,0 +1,177 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.WebApi; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class TaskStep : JobStep + { + [JsonConstructor] + public TaskStep() + { + } + + internal TaskStep(TaskInstance legacyTaskInstance) + { + this.ContinueOnError = new BooleanToken(null, null, null, legacyTaskInstance.ContinueOnError); + this.DisplayName = legacyTaskInstance.DisplayName; + this.Enabled = legacyTaskInstance.Enabled; + this.Id = legacyTaskInstance.InstanceId; + this.Name = legacyTaskInstance.RefName; + this.TimeoutInMinutes = new NumberToken(null, null, null, legacyTaskInstance.TimeoutInMinutes); + this.Reference = new 
TaskStepDefinitionReference() + { + Id = legacyTaskInstance.Id, + Name = legacyTaskInstance.Name, + Version = legacyTaskInstance.Version + }; + + if (!String.IsNullOrEmpty(legacyTaskInstance.Condition)) + { + this.Condition = legacyTaskInstance.Condition; + } + else if (legacyTaskInstance.AlwaysRun) + { + this.Condition = "succeededOrFailed()"; + } + else + { + this.Condition = "succeeded()"; + } + + foreach (var input in legacyTaskInstance.Inputs) + { + this.Inputs[input.Key] = input.Value; + } + + foreach (var env in legacyTaskInstance.Environment) + { + this.Environment[env.Key] = env.Value; + } + } + + private TaskStep(TaskStep taskToClone) + : base(taskToClone) + { + this.Reference = taskToClone.Reference?.Clone(); + + if (taskToClone.m_environment?.Count > 0) + { + m_environment = new Dictionary(taskToClone.m_environment, StringComparer.OrdinalIgnoreCase); + } + + if (taskToClone.m_inputs?.Count > 0) + { + m_inputs = new Dictionary(taskToClone.m_inputs, StringComparer.OrdinalIgnoreCase); + } + } + + public override StepType Type => StepType.Task; + + [DataMember] + public TaskStepDefinitionReference Reference + { + get; + set; + } + + public IDictionary Environment + { + get + { + if (m_environment == null) + { + m_environment = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_environment; + } + } + + public IDictionary Inputs + { + get + { + if (m_inputs == null) + { + m_inputs = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_inputs; + } + } + + public override Step Clone() + { + return new TaskStep(this); + } + + internal TaskInstance ToLegacyTaskInstance() + { + TaskInstance task = new TaskInstance() + { + AlwaysRun = String.Equals(this.Condition ?? String.Empty, "succeededOrFailed()", StringComparison.Ordinal), + Condition = this.Condition, + ContinueOnError = this.ContinueOnError?.AssertBoolean(null).Value ?? false, + DisplayName = this.DisplayName, + Enabled = this.Enabled, + InstanceId = this.Id, + RefName = this.Name, + TimeoutInMinutes = (Int32)(this.TimeoutInMinutes?.AssertNumber(null).Value ?? 
0d), + Id = this.Reference.Id, + Name = this.Reference.Name, + Version = this.Reference.Version, + }; + + foreach (var env in this.Environment) + { + task.Environment[env.Key] = env.Value; + } + + foreach (var input in this.Inputs) + { + task.Inputs[input.Key] = input.Value; + } + + return task; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_environment?.Count == 0) + { + m_environment = null; + } + + if (m_inputs?.Count == 0) + { + m_inputs = null; + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + if (m_environment != null) + { + m_environment = new Dictionary(m_environment, StringComparer.OrdinalIgnoreCase); + } + + if (m_inputs != null) + { + m_inputs = new Dictionary(m_inputs, StringComparer.OrdinalIgnoreCase); + } + } + + [DataMember(Name = "Environment", EmitDefaultValue = false)] + private IDictionary m_environment; + + [DataMember(Name = "Inputs", EmitDefaultValue = false)] + private IDictionary m_inputs; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/TaskStepDefinitionReference.cs b/src/Sdk/DTPipelines/Pipelines/TaskStepDefinitionReference.cs new file mode 100644 index 00000000000..b58762acf4e --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/TaskStepDefinitionReference.cs @@ -0,0 +1,51 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.WebApi; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class TaskStepDefinitionReference : ITaskDefinitionReference + { + [JsonConstructor] + public TaskStepDefinitionReference() + { + } + + private TaskStepDefinitionReference(TaskStepDefinitionReference referenceToClone) + { + this.Id = referenceToClone.Id; + this.Name = referenceToClone.Name; + this.Version = referenceToClone.Version; + } + + [DataMember] + public Guid Id + { + get; + set; + } + + [DataMember] + public String Name + { + get; + set; + } + + [DataMember] + public String Version + { + get; + set; + } + + public TaskStepDefinitionReference Clone() + { + return new TaskStepDefinitionReference(this); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/TaskStore.cs b/src/Sdk/DTPipelines/Pipelines/TaskStore.cs new file mode 100644 index 00000000000..ce27813a3be --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/TaskStore.cs @@ -0,0 +1,214 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.Pipelines.Artifacts; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechanism for efficient resolution of task specifications to specific versions of the tasks. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class TaskStore : ITaskStore + { + public TaskStore(params TaskDefinition[] tasks) + : this((IEnumerable)tasks) + { + } + + /// + /// Constructs a new TaskStore instance with the specified tasks. + /// + /// All tasks which should be made available for task resolution + public TaskStore( + IEnumerable tasks, + ITaskResolver resolver = null) + { + m_nameLookup = new Dictionary>>(StringComparer.OrdinalIgnoreCase); + m_tasks = new Dictionary>(); + Resolver = resolver; + + // Filter out legacy tasks with conflicting names. + // + // The PublishBuildArtifacts V0 task ID is different from PublishBuildArtifacts V1. 
+ // Attempts to resolve the PublishBuildArtifacts task by name will result in name conflict. + // The PublishBuildArtfacts V0 task is not in use anymore. It can simply be removed from + // the list of tasks, and the naming conflict averted. + // + // Additional details: The PublishBuildArtifacts V0 split into two tasks: PublishBuildArtifacts V1 + // and CopyPublishBuildArtifacts V1. The CopyPublishBuildArtifacts V1 task retained the GUID and a + // new GUID was generated for PublishBuildArtifacts V0. The split happened before task-major-version-locking + // was implemented. Therefore, no definitions are using the old version. + tasks = WellKnownTaskDefinitions + .Concat(tasks?.Where(x => !(x.Id == s_publishBuildArtifacts_v0_ID && x.Version?.Major == 0)) ?? Enumerable.Empty()) + .ToList(); + + // Build a lookup of all task versions for a given task identifier + foreach (var task in tasks) + { + AddVersion(task); + } + + // Filter the tasks to the latest within each major version so we can provide a lookup by name + var latestTasksByMajorVersion = tasks.GroupBy(x => new { x.Id, x.Version.Major }).Select(x => x.OrderByDescending(y => y.Version).First()); + foreach (var task in latestTasksByMajorVersion) + { + // The name should never be null in production environments but just in case don't provide by-name lookup + // for tasks which don't provide one. + if (!String.IsNullOrEmpty(task.Name)) + { + // Add the name lookup. + IDictionary> tasksByIdLookup; + if (!m_nameLookup.TryGetValue(task.Name, out tasksByIdLookup)) + { + tasksByIdLookup = new Dictionary>(); + m_nameLookup.Add(task.Name, tasksByIdLookup); + } + + IList tasksById; + if (!tasksByIdLookup.TryGetValue(task.Id, out tasksById)) + { + tasksById = new List(); + tasksByIdLookup.Add(task.Id, tasksById); + } + + tasksById.Add(task); + + if (!String.IsNullOrEmpty(task.ContributionIdentifier)) + { + // Add the contribution-qualified-name lookup. + var qualifiedName = $"{task.ContributionIdentifier}.{task.Name}"; + if (!m_nameLookup.TryGetValue(qualifiedName, out tasksByIdLookup)) + { + tasksByIdLookup = new Dictionary>(); + m_nameLookup.Add(qualifiedName, tasksByIdLookup); + } + + if (!tasksByIdLookup.TryGetValue(task.Id, out tasksById)) + { + tasksById = new List(); + tasksByIdLookup.Add(task.Id, tasksById); + } + + tasksById.Add(task); + } + } + } + } + + public ITaskResolver Resolver + { + get; + } + + /// + /// Resolves a task from the store using the unqiue identifier and version. + /// + /// The unique identifier of the task + /// The version of the task which is desired + /// The closest matching task definition if found; otherwise, null + public TaskDefinition ResolveTask( + Guid taskId, + String versionSpec) + { + TaskDefinition task = null; + + // Treat missing version as "*" + if (String.IsNullOrEmpty(versionSpec)) + { + versionSpec = "*"; + } + + if (m_tasks.TryGetValue(taskId, out IDictionary tasks)) + { + var parsedSpec = TaskVersionSpec.Parse(versionSpec); + task = parsedSpec.Match(tasks.Values); + } + + // Read-thru on miss + if (task == null && Resolver != null) + { + task = Resolver.Resolve(taskId, versionSpec); + if (task != null) + { + AddVersion(task); + } + } + + return task; + } + + /// + /// Resolves a task from the store using the specified name and version. 
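+ /// For example, ResolveTask("PublishBuildArtifacts", "1.*") would be expected to return the closest
+ /// matching 1.x definition, preferring in-the-box tasks when the same name maps to more than one
+ /// task identifier (the "1.*" version spec syntax is illustrative).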
+ /// + /// The name of the task + /// The version of the task which is desired + /// The closest matching task definition if found; otherwise, null + public TaskDefinition ResolveTask( + String name, + String versionSpec) + { + Guid taskIdentifier; + if (!Guid.TryParse(name, out taskIdentifier)) + { + IDictionary> nameLookup; + if (!m_nameLookup.TryGetValue(name, out nameLookup)) + { + return null; + } + + if (nameLookup.Count == 1) + { + // Exactly one task ID was resolved. + taskIdentifier = nameLookup.Keys.Single(); + } + else + { + // More than one task ID was resolved. + // Prefer in-the-box tasks over extension tasks. + var inTheBoxTaskIdentifiers = + nameLookup + .Where(pair => pair.Value.All(taskDefinition => String.IsNullOrEmpty(taskDefinition.ContributionIdentifier))) + .Select(pair => pair.Key) + .ToList(); + if (inTheBoxTaskIdentifiers.Count == 1) + { + taskIdentifier = inTheBoxTaskIdentifiers[0]; + } + else + { + // Otherwise, ambiguous. + throw new AmbiguousTaskSpecificationException(PipelineStrings.AmbiguousTaskSpecification(name, String.Join(", ", nameLookup.Keys))); + } + } + } + + return ResolveTask(taskIdentifier, versionSpec); + } + + private void AddVersion(TaskDefinition task) + { + IDictionary tasksByVersion; + if (!m_tasks.TryGetValue(task.Id, out tasksByVersion)) + { + tasksByVersion = new Dictionary(StringComparer.OrdinalIgnoreCase); + m_tasks.Add(task.Id, tasksByVersion); + } + + tasksByVersion[task.Version] = task; + } + + private IDictionary> m_tasks; + private IDictionary>> m_nameLookup; + + private static readonly Guid s_publishBuildArtifacts_v0_ID = new Guid("1d341bb0-2106-458c-8422-d00bcea6512a"); + + private static readonly TaskDefinition[] WellKnownTaskDefinitions = new[] + { + PipelineConstants.CheckoutTask, + PipelineArtifactConstants.DownloadTask, + }; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/TaskTemplateReference.cs b/src/Sdk/DTPipelines/Pipelines/TaskTemplateReference.cs new file mode 100644 index 00000000000..532867d204e --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/TaskTemplateReference.cs @@ -0,0 +1,47 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class TaskTemplateReference + { + public TaskTemplateReference() + { + } + + private TaskTemplateReference(TaskTemplateReference referenceToClone) + { + this.Id = referenceToClone.Id; + this.Name = referenceToClone.Name; + this.Version = referenceToClone.Version; + } + + [DataMember(EmitDefaultValue = false)] + public Guid Id + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Version + { + get; + set; + } + + public TaskTemplateReference Clone() + { + return new TaskTemplateReference(this); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/TaskTemplateStep.cs b/src/Sdk/DTPipelines/Pipelines/TaskTemplateStep.cs new file mode 100644 index 00000000000..708d58455b3 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/TaskTemplateStep.cs @@ -0,0 +1,65 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class TaskTemplateStep : Step + { + public TaskTemplateStep() + { + } + + private TaskTemplateStep(TaskTemplateStep templateToClone) + : 
base(templateToClone) + { + this.Reference = templateToClone.Reference?.Clone(); + + if (templateToClone.m_parameters?.Count > 0) + { + m_parameters = new Dictionary(templateToClone.m_parameters, StringComparer.OrdinalIgnoreCase); + } + } + + public override StepType Type => StepType.TaskTemplate; + + public IDictionary Parameters + { + get + { + if (m_parameters == null) + { + m_parameters = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_parameters; + } + } + + [DataMember(EmitDefaultValue = false)] + public TaskTemplateReference Reference + { + get; + set; + } + + public override Step Clone() + { + return new TaskTemplateStep(this); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_parameters?.Count == 0) + { + m_parameters = null; + } + } + + [DataMember(Name = "Parameters", EmitDefaultValue = false)] + private IDictionary m_parameters; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/TaskTemplateStore.cs b/src/Sdk/DTPipelines/Pipelines/TaskTemplateStore.cs new file mode 100644 index 00000000000..a48762ef7f7 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/TaskTemplateStore.cs @@ -0,0 +1,36 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class TaskTemplateStore : ITaskTemplateStore + { + public TaskTemplateStore(IList resolvers) + { + m_resolvers = new List(resolvers ?? Enumerable.Empty()); + } + + public void AddProvider(ITaskTemplateResolver resolver) + { + ArgumentUtility.CheckForNull(resolver, nameof(resolver)); + m_resolvers.Add(resolver); + } + + public IEnumerable ResolveTasks(TaskTemplateStep step) + { + var resolver = m_resolvers.FirstOrDefault(x => x.CanResolve(step.Reference)); + if (resolver == null) + { + throw new NotSupportedException(PipelineStrings.TaskTemplateNotSupported(step.Reference.Name, step.Reference.Version)); + } + + return resolver.ResolveTasks(step); + } + + private readonly IList m_resolvers; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/TimelineRecordIdGenerator.cs b/src/Sdk/DTPipelines/Pipelines/TimelineRecordIdGenerator.cs new file mode 100644 index 00000000000..b6a54e86386 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/TimelineRecordIdGenerator.cs @@ -0,0 +1,189 @@ +using System; +using System.ComponentModel; +using System.Text; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class TimelineRecordIdGenerator + { + public static Guid GetId(String refName) + { + byte[] bytes = Encoding.BigEndianUnicode.GetBytes(refName); + var sha1ForNonSecretPurposes = new Sha1ForNonSecretPurposes(); + sha1ForNonSecretPurposes.Start(); + sha1ForNonSecretPurposes.Append(namespaceBytes); + sha1ForNonSecretPurposes.Append(bytes); + Array.Resize(ref bytes, 16); + sha1ForNonSecretPurposes.Finish(bytes); + bytes[7] = (byte)((bytes[7] & 15) | 80); + return new Guid(bytes); + } + + // Value of 'DistributedTask.Pipelines' encoded without the namespace bytes on the front + private static readonly byte[] namespaceBytes = new byte[] + { + 83, + 55, + 27, + 127, + 212, + 97, + 75, + 93, + 197, + 226, + 39, + 51, + 83, + 35, + 223, + 36 + }; + + private struct Sha1ForNonSecretPurposes + { + private long length; + + private uint[] w; + + private int pos; + + public void Start() + { + if (this.w == null) + { + this.w = new uint[85]; + } + this.length = 0L; + this.pos = 
0; + this.w[80] = 1732584193u; + this.w[81] = 4023233417u; + this.w[82] = 2562383102u; + this.w[83] = 271733878u; + this.w[84] = 3285377520u; + } + + public void Append(byte input) + { + this.w[this.pos / 4] = (this.w[this.pos / 4] << 8 | (uint)input); + int arg_35_0 = 64; + int num = this.pos + 1; + this.pos = num; + if (arg_35_0 == num) + { + this.Drain(); + } + } + + public void Append(byte[] input) + { + for (int i = 0; i < input.Length; i++) + { + byte input2 = input[i]; + this.Append(input2); + } + } + + public void Finish(byte[] output) + { + long num = this.length + (long)(8 * this.pos); + this.Append(128); + while (this.pos != 56) + { + this.Append(0); + } + this.Append((byte)(num >> 56)); + this.Append((byte)(num >> 48)); + this.Append((byte)(num >> 40)); + this.Append((byte)(num >> 32)); + this.Append((byte)(num >> 24)); + this.Append((byte)(num >> 16)); + this.Append((byte)(num >> 8)); + this.Append((byte)num); + int num2 = (output.Length < 20) ? output.Length : 20; + for (int num3 = 0; num3 != num2; num3++) + { + uint num4 = this.w[80 + num3 / 4]; + output[num3] = (byte)(num4 >> 24); + this.w[80 + num3 / 4] = num4 << 8; + } + } + + private void Drain() + { + for (int num = 16; num != 80; num++) + { + this.w[num] = Rol1(this.w[num - 3] ^ this.w[num - 8] ^ this.w[num - 14] ^ this.w[num - 16]); + } + uint num2 = this.w[80]; + uint num3 = this.w[81]; + uint num4 = this.w[82]; + uint num5 = this.w[83]; + uint num6 = this.w[84]; + for (int num7 = 0; num7 != 20; num7++) + { + uint num8 = (num3 & num4) | (~num3 & num5); + uint num9 = Rol5(num2) + num8 + num6 + 1518500249u + this.w[num7]; + num6 = num5; + num5 = num4; + num4 = Rol30(num3); + num3 = num2; + num2 = num9; + } + for (int num10 = 20; num10 != 40; num10++) + { + uint num11 = num3 ^ num4 ^ num5; + uint num12 = Rol5(num2) + num11 + num6 + 1859775393u + this.w[num10]; + num6 = num5; + num5 = num4; + num4 = Rol30(num3); + num3 = num2; + num2 = num12; + } + for (int num13 = 40; num13 != 60; num13++) + { + uint num14 = (num3 & num4) | (num3 & num5) | (num4 & num5); + uint num15 = Rol5(num2) + num14 + num6 + 2400959708u + this.w[num13]; + num6 = num5; + num5 = num4; + num4 = Rol30(num3); + num3 = num2; + num2 = num15; + } + for (int num16 = 60; num16 != 80; num16++) + { + uint num17 = num3 ^ num4 ^ num5; + uint num18 = Rol5(num2) + num17 + num6 + 3395469782u + this.w[num16]; + num6 = num5; + num5 = num4; + num4 = Rol30(num3); + num3 = num2; + num2 = num18; + } + this.w[80] += num2; + this.w[81] += num3; + this.w[82] += num4; + this.w[83] += num5; + this.w[84] += num6; + this.length += 512L; + this.pos = 0; + } + + private static uint Rol1(uint input) + { + return input << 1 | input >> 31; + } + + private static uint Rol5(uint input) + { + return input << 5 | input >> 27; + } + + private static uint Rol30(uint input) + { + return input << 30 | input >> 2; + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Validation/GraphValidator.cs b/src/Sdk/DTPipelines/Pipelines/Validation/GraphValidator.cs new file mode 100644 index 00000000000..8a1d7535f11 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Validation/GraphValidator.cs @@ -0,0 +1,186 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace GitHub.DistributedTask.Pipelines.Validation +{ + internal static class GraphValidator + { + internal delegate String ErrorFormatter(String code, params Object[] values); + + internal static void Validate( + PipelineBuildContext context, + ValidationResult result, + Func getBaseRefName, + String graphName, + 
IList nodes, + ErrorFormatter formatError) where T : class, IGraphNode + { + var unnamedNodes = new List(); + var startingNodes = new List(); + var knownNames = new HashSet(StringComparer.OrdinalIgnoreCase); + Boolean hasDuplicateName = false; + foreach (var node in nodes) + { + if (!String.IsNullOrEmpty(node.Name)) + { + if (!NameValidation.IsValid(node.Name, context.BuildOptions.AllowHyphenNames)) + { + result.Errors.Add(new PipelineValidationError(PipelineConstants.NameInvalid, formatError(PipelineConstants.NameInvalid, graphName, node.Name))); + } + else if (!knownNames.Add(node.Name)) + { + hasDuplicateName = true; + result.Errors.Add(new PipelineValidationError(PipelineConstants.NameNotUnique, formatError(PipelineConstants.NameNotUnique, graphName, node.Name))); + } + } + else + { + unnamedNodes.Add(node); + } + + if (node.DependsOn.Count == 0) + { + startingNodes.Add(node); + } + } + + Int32 nodeCounter = 1; + foreach (var unnamedNode in unnamedNodes) + { + var candidateName = getBaseRefName(nodeCounter); + while (!knownNames.Add(candidateName)) + { + nodeCounter++; + candidateName = getBaseRefName(nodeCounter); + } + + nodeCounter++; + unnamedNode.Name = candidateName; + } + + // Now that we have generated default names we can validate and provide error messages + foreach (var node in nodes) + { + node.Validate(context, result); + } + + if (startingNodes.Count == 0) + { + result.Errors.Add(new PipelineValidationError(PipelineConstants.StartingPointNotFound, formatError(PipelineConstants.StartingPointNotFound, graphName))); + return; + } + + // Skip validating the graph if duplicate phase names + if (hasDuplicateName) + { + return; + } + + var nodesToVisit = new Queue(startingNodes); + var nodeLookup = nodes.ToDictionary(x => x.Name, StringComparer.OrdinalIgnoreCase); + var unsatisfiedDependencies = nodes.ToDictionary(x => x.Name, x => new List(x.DependsOn), StringComparer.OrdinalIgnoreCase); + var visitedNames = new HashSet(StringComparer.OrdinalIgnoreCase); + while (nodesToVisit.Count > 0) + { + var currentPhase = nodesToVisit.Dequeue(); + + visitedNames.Add(currentPhase.Name); + + // Now figure out which nodes would start as a result of this + foreach (var nodeState in unsatisfiedDependencies) + { + for (Int32 i = nodeState.Value.Count - 1; i >= 0; i--) + { + if (nodeState.Value[i].Equals(currentPhase.Name, StringComparison.OrdinalIgnoreCase)) + { + nodeState.Value.RemoveAt(i); + if (nodeState.Value.Count == 0) + { + nodesToVisit.Enqueue(nodeLookup[nodeState.Key]); + } + } + } + } + } + + // There are nodes which are never going to execute, which is generally caused by a cycle in the graph. + var unreachableNodeCount = nodes.Count - visitedNames.Count; + if (unreachableNodeCount > 0) + { + foreach (var unreachableNode in unsatisfiedDependencies.Where(x => x.Value.Count > 0)) + { + foreach (var unsatisifedDependency in unreachableNode.Value) + { + if (!nodeLookup.ContainsKey(unsatisifedDependency)) + { + result.Errors.Add(new PipelineValidationError(PipelineConstants.DependencyNotFound, formatError(PipelineConstants.DependencyNotFound, graphName, unreachableNode.Key, unsatisifedDependency))); + } + else + { + result.Errors.Add(new PipelineValidationError(PipelineConstants.GraphContainsCycle, formatError(PipelineConstants.GraphContainsCycle, graphName, unreachableNode.Key, unsatisifedDependency))); + } + } + } + } + } + + /// + /// Traverses a validated graph running a callback on each node in the order it would execute at runtime. 
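+ /// For instance, with nodes A, B (depends on A), and C (depends on A and B), the callback fires for
+ /// A, then B, then C, and the dependency set passed for C contains both A and B (names illustrative).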
+ /// + /// The type of graph node + /// The full set of nodes in the graph + /// A callback which is invoked for each node as execution would begin + internal static void Traverse( + IList nodes, + Action> handleNode) where T : class, IGraphNode + { + var nodeLookup = nodes.ToDictionary(x => x.Name, x => new GraphTraversalState(x), StringComparer.OrdinalIgnoreCase); + var pendingNodes = nodes.ToDictionary(x => x.Name, x => new List(x.DependsOn), StringComparer.OrdinalIgnoreCase); + var nodesToVisit = new Queue>(nodes.Where(x => x.DependsOn.Count == 0).Select(x => new GraphTraversalState(x))); + while (nodesToVisit.Count > 0) + { + var currentNode = nodesToVisit.Dequeue(); + + // Invoke the callback on this node since it would execute next. The dependencies provided to the + // callback is a fully recursive set of all dependencies for context on how a node would execute + // at runtime. + handleNode(currentNode.Node, currentNode.Dependencies); + + // Now figure out which nodes would start as a result of this + foreach (var nodeState in pendingNodes) + { + for (Int32 i = nodeState.Value.Count - 1; i >= 0; i--) + { + if (nodeState.Value[i].Equals(currentNode.Node.Name, StringComparison.OrdinalIgnoreCase)) + { + nodeState.Value.RemoveAt(i); + + // Make sure we include the completed nodes recursive dependency set into the dependent + // node recursive dependency set for accurate hit detection. + var traversalState = nodeLookup[nodeState.Key]; + traversalState.Dependencies.Add(currentNode.Node.Name); + traversalState.Dependencies.UnionWith(currentNode.Dependencies); + + if (nodeState.Value.Count == 0) + { + nodesToVisit.Enqueue(traversalState); + } + } + } + } + } + } + + private class GraphTraversalState where T : class, IGraphNode + { + public GraphTraversalState(T node) + { + this.Node = node; + } + + public T Node { get; } + public ISet Dependencies { get; } = new HashSet(StringComparer.OrdinalIgnoreCase); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Validation/IInputValidator.cs b/src/Sdk/DTPipelines/Pipelines/Validation/IInputValidator.cs new file mode 100644 index 00000000000..c517c740e8d --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Validation/IInputValidator.cs @@ -0,0 +1,18 @@ +using System.ComponentModel; + +namespace GitHub.DistributedTask.Pipelines.Validation +{ + /// + /// Provides a contract validators must implement to participate in input validation. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public interface IInputValidator + { + /// + /// Validates the input value using the provided context. + /// + /// The current input validation context + /// A result which indicates success or failure of the validation in addition to detailed reason on failure + InputValidationResult Validate(InputValidationContext context); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Validation/InputValidationContext.cs b/src/Sdk/DTPipelines/Pipelines/Validation/InputValidationContext.cs new file mode 100644 index 00000000000..61ce8c88a6d --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Validation/InputValidationContext.cs @@ -0,0 +1,73 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Expressions; +using GitHub.DistributedTask.Logging; + +namespace GitHub.DistributedTask.Pipelines.Validation +{ + /// + /// Provides the necessary context for performing input value validation. 
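+ /// Callers typically populate Expression with the validation expression, Value with the raw input to
+ /// test, and set Evaluate to true when the expression should actually be evaluated rather than only
+ /// parsed for syntax.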
+ /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class InputValidationContext + { + /// + /// Gets or sets an expression which should be used to validate . + /// + [DataMember(EmitDefaultValue = false)] + public String Expression + { + get; + set; + } + + /// + /// Gets or sets a value indicating whether or not to evaluate the expression using . + /// + [DataMember(EmitDefaultValue = false)] + public Boolean Evaluate + { + get; + set; + } + + /// + /// Gets or sets the options used during expression evaluation. + /// + public EvaluationOptions EvaluationOptions + { + get; + set; + } + + /// + /// Gets or sets the secret masker implementation. + /// + public ISecretMasker SecretMasker + { + get; + set; + } + + /// + /// Gets or sets the trace writer implementation. + /// + public ITraceWriter TraceWriter + { + get; + set; + } + + /// + /// Gets or sets the value which should be validated. + /// + [DataMember(EmitDefaultValue = false)] + public String Value + { + get; + set; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Validation/InputValidationResult.cs b/src/Sdk/DTPipelines/Pipelines/Validation/InputValidationResult.cs new file mode 100644 index 00000000000..f7ce3ad0c6e --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Validation/InputValidationResult.cs @@ -0,0 +1,43 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines.Validation +{ + /// + /// Provides information about the result of input validation. + /// + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class InputValidationResult + { + public InputValidationResult() + { + } + + /// + /// Gets or sets a value indicating whether or not the input value is valid. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean IsValid + { + get; + set; + } + + /// + /// Gets or sets a value indicating a detailed reason the input value is not valid. + /// + [DataMember(EmitDefaultValue = false)] + public String Reason + { + get; + set; + } + + /// + /// Provides a convenience property to return successful validation results. + /// + public static readonly InputValidationResult Succeeded = new InputValidationResult { IsValid = true }; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Validation/InputValidator.cs b/src/Sdk/DTPipelines/Pipelines/Validation/InputValidator.cs new file mode 100644 index 00000000000..9cfe8ce73c7 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Validation/InputValidator.cs @@ -0,0 +1,44 @@ +using System; +using System.ComponentModel; +using GitHub.DistributedTask.Expressions; +using GitHub.DistributedTask.Pipelines.Expressions; +using GitHub.DistributedTask.WebApi; + +namespace GitHub.DistributedTask.Pipelines.Validation +{ + /// + /// Provides a validator implementation for task inputs. 
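+ /// A minimal (hypothetical) call is new InputValidator().Validate(context): an empty Expression
+ /// yields InputValidationResult.Succeeded, a parse failure is reported through Reason, and when
+ /// context.Evaluate is set the evaluated expression determines IsValid.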
+ /// + [EditorBrowsable(EditorBrowsableState.Never)] + public class InputValidator : IInputValidator + { + public InputValidationResult Validate(InputValidationContext context) + { + if (String.IsNullOrEmpty(context.Expression)) + { + return InputValidationResult.Succeeded; + } + + var result = new InputValidationResult(); + try + { + var parser = new ExpressionParser(); + var tree = parser.CreateTree(context.Expression, context.TraceWriter, namedValues: InputValidationConstants.NamedValues, functions: InputValidationConstants.Functions); + if (context.Evaluate) + { + result.IsValid = tree.Evaluate(context.TraceWriter, context.SecretMasker, context, context.EvaluationOptions); + } + else + { + result.IsValid = true; + } + } + catch (Exception ex) when (ex is ParseException || ex is RegularExpressionInvalidOptionsException || ex is NotSupportedException) + { + result.Reason = ex.Message; + } + + return result; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Validation/NameValidation.cs b/src/Sdk/DTPipelines/Pipelines/Validation/NameValidation.cs new file mode 100644 index 00000000000..3c198346fbf --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Validation/NameValidation.cs @@ -0,0 +1,59 @@ +using System; +using System.ComponentModel; +using System.Text; + +namespace GitHub.DistributedTask.Pipelines.Validation +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class NameValidation + { + public static Boolean IsValid( + String name, + Boolean allowHyphens = false) + { + var result = true; + for (Int32 i = 0; i < name.Length; i++) + { + if ((name[i] >= 'a' && name[i] <= 'z') || + (name[i] >= 'A' && name[i] <= 'Z') || + (name[i] >= '0' && name[i] <= '9' && i > 0) || + (name[i] == '_') || + (allowHyphens && name[i] == '-' && i > 0)) + { + continue; + } + else + { + result = false; + break; + } + } + + return result; + } + + public static String Sanitize( + String name, + Boolean allowHyphens = false) + { + if (name == null) + { + return String.Empty; + } + + var sb = new StringBuilder(); + for (Int32 i = 0; i < name.Length; i++) + { + if ((name[i] >= 'a' && name[i] <= 'z') || + (name[i] >= 'A' && name[i] <= 'Z') || + (name[i] >= '0' && name[i] <= '9' && sb.Length > 0) || + (name[i] == '_') || + (allowHyphens && name[i] == '-' && sb.Length > 0)) + { + sb.Append(name[i]); + } + } + return sb.ToString(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Validation/ScriptTaskValidator.cs b/src/Sdk/DTPipelines/Pipelines/Validation/ScriptTaskValidator.cs new file mode 100644 index 00000000000..52452096d96 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Validation/ScriptTaskValidator.cs @@ -0,0 +1,198 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Text.RegularExpressions; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Pipelines.Validation +{ + /// + /// Validates script tasks for bad tokens. For best performance, create one instance and reuse - this is + /// thread safe. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ScriptTaskValidator + { + /// + /// If supplied, combined with to form a single set of bad tokens. 
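+        // Illustrative usage sketch for the validators defined above; the expression and values are
+        // hypothetical, and the available functions and named values come from InputValidationConstants
+        // (defined elsewhere in this patch):
+        //
+        //   var validator = new InputValidator();
+        //   var result = validator.Validate(new InputValidationContext
+        //   {
+        //       Expression = "isMatch(value, '^[0-9]+$')",   // hypothetical expression
+        //       Value = "42",
+        //       Evaluate = true,
+        //   });
+        //   // result.IsValid is true when the expression parses and evaluates to true; parse failures
+        //   // and unsupported expressions are reported through result.Reason rather than thrown.
+        //
+        //   NameValidation.IsValid("build_job");      // true  - letters, digits (not leading), underscores
+        //   NameValidation.IsValid("1stJob");         // false - leading digit
+        //   NameValidation.Sanitize("1st-Job!");      // "stJob" with the default allowHyphens: false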
+ /// + public ScriptTaskValidator(IBadTokenProvider clientTokenPrv = null) + { + var regexToMatch = new HashSet(RegexPatternComparer.Instance); + var tokenToMatch = new HashSet(StringComparer.OrdinalIgnoreCase); + + var tokenPrvs = new List(2) { BaseBadTokenProvider.Instance }; + if (clientTokenPrv != null) + { + tokenPrvs.Add(clientTokenPrv); + } + + foreach (IBadTokenProvider tokenPrv in tokenPrvs) + { + foreach (string pattern in tokenPrv.GetRegexPatternsToMatch()) + { + regexToMatch.Add( + new Regex(pattern, RegexOptions.Compiled, matchTimeout: TimeSpan.FromMilliseconds(100))); + } + + foreach (string staticToken in tokenPrv.GetStaticTokensToMatch()) + { + tokenToMatch.Add(staticToken); + } + } + + m_regexesToMatch = regexToMatch.ToArray(); + m_stringsToMatch = tokenToMatch.ToArray(); + } + + /// + /// Check a string for tokens containing "banned" patterns. This method is thread safe. + /// + public bool HasBadParamOrArgument( + string exeAndArgs, + out string matchedPattern, + out string matchedToken) + { + ArgumentUtility.CheckForNull(exeAndArgs, nameof(exeAndArgs)); + + string[] args = exeAndArgs.Split(); + + // Using for loops b/c they are measurably faster than foreach and this is n^2 + for (int i = 0; i < args.Length; i++) + { + string arg = args[i]; + + // Check static matches + for (int j = 0; j < m_stringsToMatch.Length; j++) + { + string toTest = m_stringsToMatch[j]; + if (arg.IndexOf(toTest, StringComparison.OrdinalIgnoreCase) >= 0) + { + matchedPattern = toTest; + matchedToken = arg; + + return true; + } + } + + // Check regexes + for (int j = 0; j < m_regexesToMatch.Length; j++) + { + Regex toTest = m_regexesToMatch[j]; + if (toTest.IsMatch(arg)) + { + matchedPattern = toTest.ToString(); + matchedToken = arg; + + return true; + } + } + } + + matchedPattern = null; + matchedToken = null; + return false; + } + + // These are arrays for max perf when enumerating/indexing + private readonly Regex[] m_regexesToMatch; + private readonly string[] m_stringsToMatch; + + public interface IBadTokenProvider + { + IEnumerable GetRegexPatternsToMatch(); + + IEnumerable GetStaticTokensToMatch(); + } + + /// + /// Static set of bad tokens we know about. + /// + private sealed class BaseBadTokenProvider : IBadTokenProvider + { + private BaseBadTokenProvider() + { + } + + public IEnumerable GetRegexPatternsToMatch() + { + // https://en.wikipedia.org/wiki/Base58 + const string base58charPattern = "[1-9a-km-zA-HJ-NP-Z]"; + + // We expect arguments to begin with whitespace (i.e. --config-option bla) or an = (i.e. + // --config-option=bla). If whitespace, we'll split the string so there is none to start. + const string beginTokenDelimiter = "(^|=)"; + + // We always expect arguments to end with whitespace, so any match will be the end of the string + const string endTokenDelimiter = "$"; + + string wrapInDelimeters(string argument) + { + return beginTokenDelimiter + argument + endTokenDelimiter; + } + + // Avoid patterns than can cause catastrophic backtracking for perf reasons + // https://www.regular-expressions.info/catastrophic.html + return new[] + { + // Monero wallets. See https://moneroaddress.org/ + // http://monero.wikia.com/wiki/Address_validation + wrapInDelimeters("4" + base58charPattern + "{94}"), + wrapInDelimeters("4" + base58charPattern + "{105}"), + + // Bitcoin wallets. See https://en.bitcoin.it/wiki/Address + // Starts with 1 or 3, total 33-35 base58 chars + wrapInDelimeters("[1-3]" + base58charPattern + "{32,34}"), + // Starts with bc[1-16], then 39(?) 
to 87 (90-3) base32 chars + // See: https://en.bitcoin.it/wiki/Bech32 + wrapInDelimeters("bc[0-9]{1,2}([0-9a-zA-Z]){39}"), + wrapInDelimeters("bc[0-9]{1,2}([0-9a-zA-Z]){59}"), + }; + } + + public IEnumerable GetStaticTokensToMatch() + { + return new[] + { + // Begin known mining pools + "xmr.suprnova.cc", + "MoneroOcean.stream", + "supportXMR.com", + "xmr.nanopool.org", + "monero.hashvault.pro", + "MoriaXMR.com", + "xmrpool.", + "minergate.com", + "viaxmr.com", + "xmr.suprnova.cc", + // End known mining pools + + // Probable mining argument + "--donate-level", + + // Other probable mining processes + "cpuminer", + "cryptonight", + "sgminer", + "xmrig", + "nheqminer" + }; + } + + public static readonly IBadTokenProvider Instance = new BaseBadTokenProvider(); + } + + private sealed class RegexPatternComparer : IEqualityComparer + { + private RegexPatternComparer() + { + } + + public bool Equals(Regex x, Regex y) => x.ToString() == y.ToString(); + public int GetHashCode(Regex obj) => obj.GetHashCode(); + + public static readonly IEqualityComparer Instance = new RegexPatternComparer(); + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/Validation/ValidationResult.cs b/src/Sdk/DTPipelines/Pipelines/Validation/ValidationResult.cs new file mode 100644 index 00000000000..00ed51cd958 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Validation/ValidationResult.cs @@ -0,0 +1,110 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines.Validation +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class ValidationResult + { + public PipelineEnvironment Environment + { + get; + internal set; + } + + public IList Errors + { + get + { + if (m_errors == null) + { + m_errors = new List(); + } + return m_errors; + } + } + + public PipelineResources ReferencedResources + { + get + { + if (m_referencedResources == null) + { + m_referencedResources = new PipelineResources(); + } + return m_referencedResources; + } + } + + public PipelineResources UnauthorizedResources + { + get + { + if (m_unauthorizedResources == null) + { + m_unauthorizedResources = new PipelineResources(); + } + return m_unauthorizedResources; + } + } + + internal void AddQueueReference( + Int32 id, + String name) + { + if (id != 0) + { + this.ReferencedResources.Queues.Add(new AgentQueueReference { Id = id }); + } + else if (!String.IsNullOrEmpty(name)) + { + this.ReferencedResources.Queues.Add(new AgentQueueReference { Name = name }); + } + } + + internal void AddPoolReference( + Int32 id, + String name) + { + if (id != 0) + { + this.ReferencedResources.Pools.Add(new AgentPoolReference { Id = id }); + } + else if (!String.IsNullOrEmpty(name)) + { + this.ReferencedResources.Pools.Add(new AgentPoolReference { Name = name }); + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_errors?.Count == 0) + { + m_errors = null; + } + + if (m_referencedResources?.Count == 0) + { + m_referencedResources = null; + } + + if (m_unauthorizedResources?.Count == 0) + { + m_unauthorizedResources = null; + } + } + + [DataMember(Name = "Errors", EmitDefaultValue = false)] + private List m_errors; + + [DataMember(Name = "ReferencedResources", EmitDefaultValue = false)] + private PipelineResources m_referencedResources; + + [DataMember(Name = "UnauthorizedResources", EmitDefaultValue = false)] + private PipelineResources m_unauthorizedResources; + } +} diff --git 
a/src/Sdk/DTPipelines/Pipelines/Variable.cs b/src/Sdk/DTPipelines/Pipelines/Variable.cs new file mode 100644 index 00000000000..6b99a03219a --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/Variable.cs @@ -0,0 +1,50 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class Variable : IVariable + { + public Variable() + { + } + + private Variable(Variable variableToClone) + { + this.Name = variableToClone.Name; + this.Secret = variableToClone.Secret; + this.Value = variableToClone.Value; + } + + [DataMember] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Boolean Secret + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Value + { + get; + set; + } + + public Variable Clone() + { + return new Variable(this); + } + + VariableType IVariable.Type => VariableType.Inline; + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/VariableGroupReference.cs b/src/Sdk/DTPipelines/Pipelines/VariableGroupReference.cs new file mode 100644 index 00000000000..46e98b82f61 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/VariableGroupReference.cs @@ -0,0 +1,63 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class VariableGroupReference : ResourceReference, IVariable + { + public VariableGroupReference() + { + } + + private VariableGroupReference(VariableGroupReference referenceToCopy) + : base(referenceToCopy) + { + this.Id = referenceToCopy.Id; + this.GroupType = referenceToCopy.GroupType; + this.SecretStore = referenceToCopy.SecretStore?.Clone(); + } + + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String GroupType + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public SecretStoreConfiguration SecretStore + { + get; + set; + } + + public VariableGroupReference Clone() + { + return new VariableGroupReference(this); + } + + public override String ToString() + { + return base.ToString() ?? 
this.Id.ToString(); + } + + [DataMember(Name = nameof(Type))] + VariableType IVariable.Type + { + get + { + return VariableType.Group; + } + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/VariableGroupStore.cs b/src/Sdk/DTPipelines/Pipelines/VariableGroupStore.cs new file mode 100644 index 00000000000..3ea48d876c0 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/VariableGroupStore.cs @@ -0,0 +1,187 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Pipelines +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public class VariableGroupStore : IVariableGroupStore + { + public VariableGroupStore( + IList resources, + IVariableGroupResolver resolver = null, + params IVariableValueProvider[] valueProviders) + { + this.Resolver = resolver; + Add(resources?.ToArray()); + + if (valueProviders?.Length > 0) + { + m_valueProviders = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var valueProvider in valueProviders) + { + if (!m_valueProviders.TryAdd(valueProvider.GroupType, valueProvider)) + { + throw new ArgumentException($"Group type {valueProvider.GroupType} cannot have more than one provider", nameof(valueProviders)); + } + } + } + } + + /// + /// Get the variable group resolver configured for this store. + /// + public IVariableGroupResolver Resolver + { + get; + } + + public IList GetAuthorizedReferences() + { + return m_resourcesById.Values.Select(x => new VariableGroupReference { Id = x.Id }).ToList(); + } + + public VariableGroup Get(VariableGroupReference reference) + { + if (reference == null) + { + return null; + } + + var referenceId = reference.Id; + var referenceName = reference.Name?.Literal; + if (referenceId == 0 && String.IsNullOrEmpty(referenceName)) + { + return null; + } + + VariableGroup authorizedResource = null; + if (referenceId != 0) + { + if (m_resourcesById.TryGetValue(referenceId, out authorizedResource)) + { + return authorizedResource; + } + } + else if (!String.IsNullOrEmpty(referenceName)) + { + if (m_resourcesByName.TryGetValue(referenceName, out List matchingResources)) + { + if (matchingResources.Count > 1) + { + throw new AmbiguousResourceSpecificationException(PipelineStrings.AmbiguousVariableGroupSpecification(referenceName)); + } + + return matchingResources[0]; + } + } + + // If we have an authorizer then attempt to authorize the reference for use + authorizedResource = this.Resolver?.Resolve(reference); + if (authorizedResource != null) + { + Add(authorizedResource); + } + + return authorizedResource; + } + + public IList GetPreSteps( + IPipelineContext context, + IReadOnlyList steps) + { + if (context.ReferencedResources.VariableGroups.Count == 0) + { + return null; + } + + // If the environment version is 1 and it's a build context we should inject + if (context.EnvironmentVersion < 2 && context is PipelineExecutionContext) + { + return null; + } + + var newSteps = new List(); + foreach (var group in context.ReferencedResources.VariableGroups.Where(x => x.SecretStore != null && x.SecretStore.Keys.Count > 0)) + { + // Only inject a task if the provider supports task injection for the current context + var valueProvider = GetValueProvider(group); + if (valueProvider != null && !valueProvider.ShouldGetValues(context)) + { + newSteps.AddRangeIfRangeNotNull(valueProvider.GetSteps(context, group, group.SecretStore.Keys)); + } + } + + 
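+                // Note: a provider contributes steps here only when ShouldGetValues(context) is false;
+                // providers that will fetch the values themselves for this context add no steps.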
return newSteps; + } + + public Dictionary> GetPostTaskSteps( + IPipelineContext context, + IReadOnlyList steps) + { + return new Dictionary>(); + } + + public IList GetPostSteps( + IPipelineContext context, + IReadOnlyList steps) + { + return new List(); + } + + public Boolean ResolveStep( + IPipelineContext context, + JobStep step, + out IList resolvedSteps) + { + resolvedSteps = new List(); + return false; + } + + /// + /// Gets the value provider which may be used to retrieve values for the variable group from the data store. + /// + /// The target variable group + /// A provider suitable for retrieving values from the variable group + public IVariableValueProvider GetValueProvider(VariableGroupReference group) + { + if (m_valueProviders != null && m_valueProviders.TryGetValue(group.GroupType, out var valueProvider)) + { + return valueProvider; + } + return null; + } + + private void Add(params VariableGroup[] resources) + { + if (resources?.Length > 0) + { + foreach (var resource in resources) + { + if (m_resourcesById.TryGetValue(resource.Id, out _)) + { + continue; + } + + m_resourcesById.Add(resource.Id, resource); + + if (!m_resourcesByName.TryGetValue(resource.Name, out List resourcesByName)) + { + resourcesByName = new List(); + m_resourcesByName.Add(resource.Name, resourcesByName); + } + + resourcesByName.Add(resource); + } + } + } + + private readonly Dictionary m_valueProviders; + private readonly Dictionary m_resourcesById = new Dictionary(); + private readonly Dictionary> m_resourcesByName = new Dictionary>(StringComparer.OrdinalIgnoreCase); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/VariablesDictionary.cs b/src/Sdk/DTPipelines/Pipelines/VariablesDictionary.cs new file mode 100644 index 00000000000..71b6d170c45 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/VariablesDictionary.cs @@ -0,0 +1,355 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.ComponentModel; +using System.Linq; +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.Pipelines +{ + /// + /// Provides a mechansim for modeling a variable dictionary as a simple string dictionary for expression + /// evaluation. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class VariablesDictionary : IDictionary, IDictionary + { + /// + /// Initializes a new VariablesDictionary instance with an empty variable set. + /// + public VariablesDictionary() + { + m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + /// + /// Initializes a new VariablesDictionary instance using the specified dictionary for initialization. + /// + /// The source from which to copy + public VariablesDictionary(VariablesDictionary copyFrom) + : this(copyFrom, false) + { + } + + /// + /// Initializes a new VariablesDictionary instance using the specified dictionary for initialization. + /// + /// The source from which to copy + public VariablesDictionary(IDictionary copyFrom) + : this(copyFrom?.ToDictionary(x => x.Key, x => new VariableValue { Value = x.Value }, StringComparer.OrdinalIgnoreCase), false) + { + } + + /// + /// Initializes a new VariablesDictionary instance using the specified dictionary for initialization. 
+ /// + /// The source from which to copy + public VariablesDictionary(IDictionary copyFrom) + : this(copyFrom, false) + { + } + + private VariablesDictionary( + IDictionary copyFrom, + Boolean readOnly) + { + ArgumentUtility.CheckForNull(copyFrom, nameof(copyFrom)); + + if (readOnly) + { + m_variables = new ReadOnlyDictionary(copyFrom); + } + else + { + m_variables = new Dictionary(copyFrom, StringComparer.OrdinalIgnoreCase); + } + } + + /// + /// Gets the set of secrets which were accessed. + /// + public HashSet SecretsAccessed + { + get + { + return m_secretsAccessed; + } + } + + public VariableValue this[String key] + { + get + { + if (!m_variables.TryGetValue(key, out VariableValue variableValue)) + { + throw new KeyNotFoundException(key); + } + + if (variableValue.IsSecret) + { + m_secretsAccessed.Add(key); + } + + return variableValue; + } + set + { + m_variables[key] = value; + } + } + + public ICollection Keys + { + get + { + return m_variables.Keys; + } + } + + public ICollection Values + { + get + { + return m_variables.Values; + } + } + + public Int32 Count + { + get + { + return m_variables.Count; + } + } + + public Boolean IsReadOnly + { + get + { + return m_variables.IsReadOnly; + } + } + + public void Add( + String key, + VariableValue value) + { + m_variables.Add(key, value); + } + + public void Add(KeyValuePair item) + { + m_variables.Add(item); + } + + public VariablesDictionary AsReadOnly() + { + if (m_variables.IsReadOnly) + { + return this; + } + + return new VariablesDictionary(m_variables, true); + } + + public void Clear() + { + m_variables.Clear(); + } + + public Boolean Contains(KeyValuePair item) + { + return m_variables.Contains(item); + } + + public Boolean ContainsKey(String key) + { + return m_variables.ContainsKey(key); + } + + public void CopyTo( + KeyValuePair[] array, + Int32 arrayIndex) + { + m_variables.CopyTo(array, arrayIndex); + } + + public IEnumerator> GetEnumerator() + { + return m_variables.GetEnumerator(); + } + + public Boolean Remove(String key) + { + return m_variables.Remove(key); + } + + public Boolean Remove(KeyValuePair item) + { + return m_variables.Remove(item); + } + + public Boolean TryGetValue( + String key, + out VariableValue value) + { + if (m_variables.TryGetValue(key, out value)) + { + if (value.IsSecret) + { + m_secretsAccessed.Add(key); + } + + return true; + } + + return false; + } + + ICollection IDictionary.Keys + { + get + { + return m_variables.Keys; + } + } + + ICollection IDictionary.Values + { + get + { + return m_variables.Select(x => x.Value?.Value).ToArray(); + } + } + + Int32 ICollection>.Count + { + get + { + return m_variables.Count; + } + } + + Boolean ICollection>.IsReadOnly + { + get + { + return m_variables.IsReadOnly; + } + } + + String IDictionary.this[String key] + { + get + { + if (!m_variables.TryGetValue(key, out VariableValue variableValue)) + { + throw new KeyNotFoundException(key); + } + + if (variableValue.IsSecret) + { + m_secretsAccessed.Add(key); + } + + return variableValue.Value; + } + set + { + if (!m_variables.TryGetValue(key, out VariableValue existingValue)) + { + m_variables.Add(key, value); + } + else + { + // Preserve whether or not this variable value is a secret + existingValue.Value = value; + } + } + } + + IEnumerator IEnumerable.GetEnumerator() + { + return m_variables.GetEnumerator(); + } + + IEnumerator> IEnumerable>.GetEnumerator() + { + foreach (var variable in m_variables) + { + yield return new KeyValuePair(variable.Key, variable.Value?.Value); + } + } + + Boolean 
IDictionary.TryGetValue( + String key, + out String value) + { + if (m_variables.TryGetValue(key, out VariableValue variableValue)) + { + if (variableValue.IsSecret) + { + m_secretsAccessed.Add(key); + } + + value = variableValue.Value; + return true; + } + + value = null; + return false; + } + + Boolean IDictionary.ContainsKey(String key) + { + return m_variables.ContainsKey(key); + } + + void IDictionary.Add( + String key, + String value) + { + m_variables.Add(key, value); + } + + Boolean IDictionary.Remove(String key) + { + return m_variables.Remove(key); + } + + void ICollection>.Add(KeyValuePair item) + { + m_variables.Add(new KeyValuePair(item.Key, item.Value)); + } + + void ICollection>.Clear() + { + m_variables.Clear(); + } + + Boolean ICollection>.Contains(KeyValuePair item) + { + return m_variables.Contains(new KeyValuePair(item.Key, item.Value)); + } + + void ICollection>.CopyTo( + KeyValuePair[] array, + Int32 arrayIndex) + { + foreach (var variable in m_variables) + { + array[arrayIndex++] = new KeyValuePair(variable.Key, variable.Value?.Value); + } + } + + Boolean ICollection>.Remove(KeyValuePair item) + { + return m_variables.Remove(new KeyValuePair(item.Key, item.Value)); + } + + private readonly HashSet m_secretsAccessed = new HashSet(StringComparer.OrdinalIgnoreCase); + private IDictionary m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/WorkspaceMapping.cs b/src/Sdk/DTPipelines/Pipelines/WorkspaceMapping.cs new file mode 100644 index 00000000000..fb193dd5d29 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/WorkspaceMapping.cs @@ -0,0 +1,71 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class WorkspaceMapping + { + /// + /// The map/cloak in tfvc. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean Exclude + { + get; + set; + } + + /// + /// The server path. + /// + [DataMember(EmitDefaultValue = false)] + public String ServerPath + { + get; + set; + } + + /// + /// The local path. + /// + [DataMember(EmitDefaultValue = false)] + public String LocalPath + { + get; + set; + } + + /// + /// The revision in svn. + /// + [DataMember(EmitDefaultValue = false)] + public String Revision + { + get; + set; + } + + /// + /// The depth in svn. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 Depth + { + get; + set; + } + + /// + /// Indicates whether to ignore externals in svn. 
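+        // Illustrative sketch of the two views exposed by VariablesDictionary above; the variable names
+        // and values are hypothetical:
+        //
+        //   var variables = new VariablesDictionary
+        //   {
+        //       { "system.debug", new VariableValue { Value = "true" } },
+        //       { "my.token", new VariableValue("s3cr3t", true) },
+        //   };
+        //
+        //   // Expression evaluation can treat it as a plain string dictionary...
+        //   IDictionary<String, String> asStrings = variables;
+        //   String token = asStrings["my.token"];                              // "s3cr3t"
+        //
+        //   // ...while every read of a secret value is tracked for later masking/auditing.
+        //   Boolean tracked = variables.SecretsAccessed.Contains("my.token");  // true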
+ /// + [DataMember(EmitDefaultValue = false)] + public Boolean IgnoreExternals + { + get; + set; + } + } +} diff --git a/src/Sdk/DTPipelines/Pipelines/WorkspaceOptions.cs b/src/Sdk/DTPipelines/Pipelines/WorkspaceOptions.cs new file mode 100644 index 00000000000..2fbab31dd68 --- /dev/null +++ b/src/Sdk/DTPipelines/Pipelines/WorkspaceOptions.cs @@ -0,0 +1,32 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.Pipelines +{ + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public class WorkspaceOptions + { + public WorkspaceOptions() + { + } + + private WorkspaceOptions(WorkspaceOptions optionsToCopy) + { + this.Clean = optionsToCopy.Clean; + } + + [DataMember(EmitDefaultValue = false)] + public String Clean + { + get; + set; + } + + public WorkspaceOptions Clone() + { + return new WorkspaceOptions(this); + } + } +} diff --git a/src/Sdk/DTPipelines/workflow-v1.0.json b/src/Sdk/DTPipelines/workflow-v1.0.json new file mode 100644 index 00000000000..02741c871c5 --- /dev/null +++ b/src/Sdk/DTPipelines/workflow-v1.0.json @@ -0,0 +1,646 @@ +{ + "version": "workflow-v1.0", + + "definitions": { + + "workflow-root": { + "description": "Workflow file", + "mapping": { + "properties": { + "on": "any", + "name": "string", + "env": "workflow-env", + "jobs": "jobs" + } + } + }, + + "steps-template-root": { + "description": "Steps template file", + "mapping": { + "properties": { + "inputs": "steps-template-inputs", + "outputs": "steps-template-outputs", + "steps": "steps-in-template" + } + } + }, + + "steps-scope-inputs": { + "description": "Used when evaluating steps scope inputs", + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "steps-scope-input-value" + } + }, + + "steps-scope-input-value": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "inputs", + "job", + "runner", + "env" + ], + "one-of": [ + "string", + "sequence", + "mapping" + ] + }, + + "steps-scope-outputs": { + "description": "Used when evaluating steps scope outputs", + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "steps-scope-output-value" + } + }, + + "steps-scope-output-value": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "inputs", + "job", + "runner", + "env" + ], + "string": {} + }, + + "steps-template-inputs": { + "description": "Allowed inputs in a steps template", + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "steps-template-input-value" + } + }, + + "steps-template-input-value": { + "description": "Default input values for a steps template", + "context": [ + "github", + "strategy", + "matrix" + ], + "one-of": [ + "string", + "sequence", + "mapping" + ] + }, + + "steps-template-outputs": { + "description": "Output mapping for a steps template", + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "steps-template-output-value" + } + }, + + "steps-template-output-value": { + "description": "Output values for a steps template", + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env" + ], + "string": {} + }, + + "workflow-env": { + "context": [ + "github", + "secrets" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + + "jobs": { + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "job" + } + }, + + "job": { + "mapping": { + "properties": { + "needs": 
"needs", + "if": "string", + "strategy": "strategy", + "name": "string-strategy-context", + "runs-on": "runs-on", + "timeout-minutes": "number-strategy-context", + "cancel-timeout-minutes": "number-strategy-context", + "continue-on-error": "boolean", + "container": "container", + "services": "services", + "env": "job-env", + "steps": "steps" + } + } + }, + + "needs": { + "one-of": [ + "sequence-of-non-empty-string", + "non-empty-string" + ] + }, + + "strategy": { + "context": [ + "github" + ], + "mapping": { + "properties": { + "fail-fast": "boolean", + "max-parallel": "number", + "matrix": "matrix" + } + } + }, + + "matrix": { + "mapping": { + "properties": { + "include": "matrix-include", + "exclude": "matrix-exclude" + }, + "loose-key-type": "non-empty-string", + "loose-value-type": "sequence" + } + }, + + "matrix-include": { + "sequence": { + "item-type": "matrix-include-item" + } + }, + + "matrix-include-item": { + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "any" + } + }, + + "matrix-exclude": { + "sequence": { + "item-type": "matrix-exclude-item" + } + }, + + "matrix-exclude-item": { + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "matrix-exclude-filter-item" + } + }, + + "matrix-exclude-filter-item": { + "one-of": [ + "string", + "matrix-exclude-mapping-filter" + ] + }, + + "matrix-exclude-mapping-filter": { + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "matrix-exclude-filter-item" + } + }, + + "runs-on": { + "context": [ + "github", + "strategy", + "matrix" + ], + "one-of": [ + "runs-on-string", + "runs-on-mapping" + ] + }, + + "runs-on-string": { + "string": { + "require-non-empty": true + } + }, + + "runs-on-mapping": { + "mapping": { + "properties": { + "pool": "non-empty-string" + } + } + }, + + "job-env": { + "context": [ + "github", + "secrets", + "strategy", + "matrix" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + + "steps": { + "sequence": { + "item-type": "steps-item" + } + }, + + "steps-in-template": { + "sequence": { + "item-type": "steps-item-in-template" + } + }, + + "steps-item": { + "one-of": [ + "run-step", + "regular-step", + "steps-template-reference" + ] + }, + + "steps-item-in-template": { + "one-of": [ + "run-step-in-template", + "regular-step-in-template", + "steps-template-reference-in-template" + ] + }, + + "run-step": { + "mapping": { + "properties": { + "name": "string-steps-context", + "id": "non-empty-string", + "if": "string", + "timeout-minutes": "number-steps-context", + "run": "string-steps-context", + "continue-on-error": "boolean-steps-context", + "env": "step-env", + "working-directory": "string-steps-context", + "shell": "non-empty-string" + } + } + }, + + "run-step-in-template": { + "mapping": { + "properties": { + "name": "string-steps-context-in-template", + "id": "non-empty-string", + "if": "string", + "timeout-minutes": "number-steps-context-in-template", + "run": "string-steps-context-in-template", + "continue-on-error": "boolean-steps-context-in-template", + "env": "step-env-in-template", + "working-directory": "string-steps-context-in-template", + "shell": "non-empty-string" + } + } + }, + + "regular-step": { + "mapping": { + "properties": { + "name": "string-steps-context", + "id": "non-empty-string", + "if": "string", + "continue-on-error": "boolean-steps-context", + "timeout-minutes": "number-steps-context", + "uses": "non-empty-string", + "with": "step-with", + "env": "step-env" + } + } + 
}, + + "regular-step-in-template": { + "mapping": { + "properties": { + "name": "string-steps-context-in-template", + "id": "non-empty-string", + "if": "string", + "continue-on-error": "boolean-steps-context-in-template", + "timeout-minutes": "number-steps-context-in-template", + "uses": "non-empty-string", + "with": "step-with-in-template", + "env": "step-env-in-template" + } + } + }, + + "steps-template-reference": { + "mapping": { + "properties": { + "template": "non-empty-string", + "id": "non-empty-string", + "inputs": "steps-template-reference-inputs" + } + } + }, + + "steps-template-reference-in-template": { + "mapping": { + "properties": { + "template": "non-empty-string", + "id": "non-empty-string", + "inputs": "steps-template-reference-inputs-in-template" + } + } + }, + + "steps-template-reference-inputs": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + + "steps-template-reference-inputs-in-template": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "inputs", + "job", + "runner", + "env" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + + "step-env": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + + "step-env-in-template": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "inputs", + "job", + "runner", + "env" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + + "step-with": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + + "container": { + "context": [ + "github", + "strategy", + "matrix" + ], + "one-of": [ + "non-empty-string", + "container-mapping" + ] + }, + + "container-mapping": { + "mapping": { + "properties": { + "image": "non-empty-string", + "options": "non-empty-string", + "env": "container-env", + "ports": "sequence-of-non-empty-string", + "volumes": "sequence-of-non-empty-string" + } + } + }, + + "services": { + "context": [ + "github", + "strategy", + "matrix" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "container" + } + }, + + "container-env": { + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + + "step-with-in-template": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "inputs", + "job", + "runner", + "env" + ], + "mapping": { + "loose-key-type": "non-empty-string", + "loose-value-type": "string" + } + }, + + "non-empty-string": { + "string": { + "require-non-empty": true + } + }, + + "sequence-of-non-empty-string": { + "sequence": { + "item-type": "non-empty-string" + } + }, + + "number-strategy-context": { + "context": [ + "github", + "strategy", + "matrix" + ], + "number": {} + }, + + "string-strategy-context": { + "context": [ + "github", + "strategy", + "matrix" + ], + "string": {} + }, + + "boolean-steps-context": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env" + ], + "boolean": {} + }, + + "boolean-steps-context-in-template": { + "context": [ + 
"github", + "strategy", + "matrix", + "secrets", + "steps", + "inputs", + "job", + "runner", + "env" + ], + "boolean": {} + }, + + "number-steps-context": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env" + ], + "number": {} + }, + + "number-steps-context-in-template": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "inputs", + "job", + "runner", + "env" + ], + "number": {} + }, + + "string-steps-context": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "job", + "runner", + "env" + ], + "string": {} + }, + + "string-steps-context-in-template": { + "context": [ + "github", + "strategy", + "matrix", + "secrets", + "steps", + "inputs", + "job", + "runner", + "env" + ], + "string": {} + } + } +} \ No newline at end of file diff --git a/src/Sdk/DTWebApi/WebApi/AgentJobRequestMessage.cs b/src/Sdk/DTWebApi/WebApi/AgentJobRequestMessage.cs new file mode 100644 index 00000000000..292e4077d45 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/AgentJobRequestMessage.cs @@ -0,0 +1,79 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + + public sealed class AgentJobRequestMessage : JobRequestMessage + { + [JsonConstructor] + internal AgentJobRequestMessage() : base(JobRequestMessageTypes.AgentJobRequest) + { + } + + public AgentJobRequestMessage( + TaskOrchestrationPlanReference plan, + TimelineReference timeline, + Guid jobId, + String jobName, + String jobRefName, + JobEnvironment environment, + IEnumerable tasks) + : base(JobRequestMessageTypes.AgentJobRequest, plan, timeline, jobId, jobName, jobRefName, environment) + { + m_tasks = new List(tasks); + } + + [DataMember] + public Int64 RequestId + { + get; + internal set; + } + + [DataMember] + public Guid LockToken + { + get; + internal set; + } + + [DataMember] + public DateTime LockedUntil + { + get; + internal set; + } + + public ReadOnlyCollection Tasks + { + get + { + if (m_tasks == null) + { + m_tasks = new List(); + } + return m_tasks.AsReadOnly(); + } + } + + public TaskAgentMessage GetAgentMessage() + { + var body = JsonUtility.ToString(this); + + return new TaskAgentMessage + { + Body = body, + MessageType = JobRequestMessageTypes.AgentJobRequest + }; + } + + [DataMember(Name = "Tasks", EmitDefaultValue = false)] + private List m_tasks; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/AgentRefreshMessage.cs b/src/Sdk/DTWebApi/WebApi/AgentRefreshMessage.cs new file mode 100644 index 00000000000..28c24998a95 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/AgentRefreshMessage.cs @@ -0,0 +1,49 @@ +using Newtonsoft.Json; +using System; +using System.Runtime.Serialization; + + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class AgentRefreshMessage + { + public static readonly String MessageType = "AgentRefresh"; + + [JsonConstructor] + internal AgentRefreshMessage() + { + } + + public AgentRefreshMessage( + Int32 agentId, + String targetVersion, + TimeSpan? timeout = null) + { + this.AgentId = agentId; + this.Timeout = timeout ?? 
TimeSpan.FromMinutes(60); + this.TargetVersion = targetVersion; + } + + [DataMember] + public Int32 AgentId + { + get; + private set; + } + + [DataMember] + public TimeSpan Timeout + { + get; + private set; + } + + [DataMember] + public String TargetVersion + { + get; + private set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/AuditAction.cs b/src/Sdk/DTWebApi/WebApi/AuditAction.cs new file mode 100644 index 00000000000..e133be9d242 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/AuditAction.cs @@ -0,0 +1,19 @@ +namespace GitHub.DistributedTask.WebApi +{ + using System.Runtime.Serialization; + + public enum AuditAction + { + [EnumMember] + Add = 1, + + [EnumMember] + Update = 2, + + [EnumMember] + Delete = 3, + + [EnumMember] + Undelete = 4 + } +} diff --git a/src/Sdk/DTWebApi/WebApi/AzureKeyVaultVariableGroupProviderData.cs b/src/Sdk/DTWebApi/WebApi/AzureKeyVaultVariableGroupProviderData.cs new file mode 100644 index 00000000000..756d023263e --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/AzureKeyVaultVariableGroupProviderData.cs @@ -0,0 +1,30 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class AzureKeyVaultVariableGroupProviderData : VariableGroupProviderData + { + [DataMember(EmitDefaultValue = true)] + public Guid ServiceEndpointId + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Vault + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public DateTime LastRefreshedOn + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/AzureKeyVaultVariableValue.cs b/src/Sdk/DTWebApi/WebApi/AzureKeyVaultVariableValue.cs new file mode 100644 index 00000000000..4bae3ac45ef --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/AzureKeyVaultVariableValue.cs @@ -0,0 +1,47 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class AzureKeyVaultVariableValue: VariableValue + { + public AzureKeyVaultVariableValue() + { + } + + public AzureKeyVaultVariableValue(AzureKeyVaultVariableValue value) + : this(value.Value, value.IsSecret, value.Enabled, value.ContentType, value.Expires) + { + } + + public AzureKeyVaultVariableValue(String value, Boolean isSecret, Boolean enabled, String contentType, DateTime? expires) + :base(value, isSecret) + { + Enabled = enabled; + ContentType = contentType; + Expires = expires; + } + + [DataMember(EmitDefaultValue = true)] + public Boolean Enabled + { + get; + set; + } + + [DataMember(EmitDefaultValue = true)] + public String ContentType + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public DateTime? 
Expires + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Demand.cs b/src/Sdk/DTWebApi/WebApi/Demand.cs new file mode 100644 index 00000000000..669fbff458f --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Demand.cs @@ -0,0 +1,103 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using System.Text.RegularExpressions; +using GitHub.Services.Common; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + [JsonConverter(typeof(DemandJsonConverter))] + public abstract class Demand + { + protected Demand( + String name, + String value) + { + ArgumentUtility.CheckStringForNullOrEmpty(name, "name"); + this.Name = name; + this.Value = value; + } + + [DataMember] + public String Name + { + get; + private set; + } + + [DataMember(EmitDefaultValue = false)] + public String Value + { + get; + private set; + } + + public override sealed Boolean Equals(Object obj) + { + Demand demand = obj as Demand; + return demand != null && demand.ToString().Equals(this.ToString(), StringComparison.OrdinalIgnoreCase); + } + + public override sealed Int32 GetHashCode() + { + return this.ToString().ToUpperInvariant().GetHashCode(); + } + + public override sealed String ToString() + { + return GetExpression(); + } + + public abstract Demand Clone(); + + protected abstract String GetExpression(); + + public abstract Boolean IsSatisfied(IDictionary capabilities); + + public static Boolean TryParse( + String input, + out Demand demand) + { + demand = null; + + Match match = s_demandRegex.Match(input); + if (!match.Success) + { + return false; + } + + String name = match.Groups["name"].Value; + String opcode = match.Groups["opcode"].Value; + String value = match.Groups["value"].Value; + + if (String.IsNullOrEmpty(opcode)) + { + demand = new DemandExists(name); + } + else + { + switch (opcode) + { + case "equals": + demand = new DemandEquals(name, value); + break; + case "gtVersion": + demand = new DemandMinimumVersion(name, value); + break; + } + } + + return demand != null; + } + + public void Update(String value) + { + ArgumentUtility.CheckStringForNullOrEmpty(value, "value"); + this.Value = value; + } + + private static readonly Regex s_demandRegex = new Regex(@"^(?\S+)(\s+\-(?\S+)\s+(?.*))?$", RegexOptions.Compiled); + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DemandEquals.cs b/src/Sdk/DTWebApi/WebApi/DemandEquals.cs new file mode 100644 index 00000000000..a8da39e342c --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DemandEquals.cs @@ -0,0 +1,36 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Runtime.Serialization; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class DemandEquals : Demand + { + public DemandEquals( + String name, + String value) + : base(name, value) + { + ArgumentUtility.CheckStringForNullOrEmpty(value, "value"); + } + + public override Demand Clone() + { + return new DemandEquals(this.Name, this.Value); + } + + protected override String GetExpression() + { + return String.Format(CultureInfo.InvariantCulture, "{0} -equals {1}", this.Name, this.Value); + } + + public override Boolean IsSatisfied(IDictionary capabilities) + { + String value; + return capabilities.TryGetValue(this.Name, out value) && this.Value.Equals(value, StringComparison.OrdinalIgnoreCase); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DemandExists.cs b/src/Sdk/DTWebApi/WebApi/DemandExists.cs new file mode 100644 index 00000000000..84a950c0f78 
--- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DemandExists.cs @@ -0,0 +1,36 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class DemandExists : Demand + { + public DemandExists(String name) + : base(name, null) + { + } + + public override Demand Clone() + { + return new DemandExists(this.Name); + } + + protected override String GetExpression() + { + return this.Name; + } + + public override Boolean IsSatisfied(IDictionary capabilities) + { + return capabilities.ContainsKey(this.Name); + } + + public new void Update(String value) + { + // Exists can not override value + throw new NotImplementedException(); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DemandJsonConverter.cs b/src/Sdk/DTWebApi/WebApi/DemandJsonConverter.cs new file mode 100644 index 00000000000..37c1e02113a --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DemandJsonConverter.cs @@ -0,0 +1,45 @@ +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using System; +using System.Reflection; + +namespace GitHub.DistributedTask.WebApi +{ + internal sealed class DemandJsonConverter : VssSecureJsonConverter + { + public override Boolean CanConvert(Type objectType) + { + return typeof(Demand).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (existingValue == null && reader.TokenType == JsonToken.String) + { + Demand demand; + if (Demand.TryParse((String)reader.Value, out demand)) + { + existingValue = demand; + } + } + + return existingValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + base.WriteJson(writer, value, serializer); + if (value != null) + { + writer.WriteValue(value.ToString()); + } + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DemandMinimumVersion.cs b/src/Sdk/DTWebApi/WebApi/DemandMinimumVersion.cs new file mode 100644 index 00000000000..4cac2b22f6f --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DemandMinimumVersion.cs @@ -0,0 +1,127 @@ +using GitHub.DistributedTask.Pipelines; +using GitHub.Services.Common; +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class DemandMinimumVersion : Demand + { + public DemandMinimumVersion( + String name, + String value) + : base(name, value) + { + ArgumentUtility.CheckStringForNullOrEmpty(value, "value"); + } + + public override Demand Clone() + { + return new DemandMinimumVersion(this.Name, this.Value); + } + + protected override String GetExpression() + { + return String.Format(CultureInfo.InvariantCulture, "{0} -gtVersion {1}", this.Name, this.Value); + } + + public override Boolean IsSatisfied(IDictionary capabilities) + { + String value; + if (capabilities.TryGetValue(this.Name, out value)) + { + // return true if our version is less than or equal to the capability version from the agent + return CompareVersion(this.Value, value) <= 0; + } + + // same as capabilityVersion == null + return false; + } + + public static Int32 CompareVersion(String semanticVersion1, String semanticVersion2) + { + // compare == first - second (-1 means second is greater, 1 means first is greater, 0 means they are equal) + Version version1 = ParseVersion(semanticVersion1); + Version version2 = 
ParseVersion(semanticVersion2); + if (version1 == null && version2 == null) + { + // they are both null, so they are equal + return 0; + } + else if (version1 == null) + { + // version2 is greater + return -1; + } + else if (version2 == null) + { + // version1 is greater + return 1; + } + + return version1.CompareTo(version2); + } + + /// + /// Gets the minimum agent version demand from the specified set of demands. Agent version demands are removed + /// from the input set. + /// + /// The demands + /// The highest minimum version required based in the input set + public static DemandMinimumVersion MaxAndRemove(ISet demands) + { + DemandMinimumVersion minAgentVersion = null; + var demandsCopy = demands.Where(x => x.Name.Equals(PipelineConstants.AgentVersionDemandName, StringComparison.OrdinalIgnoreCase)).OfType().ToList(); + foreach (var demand in demandsCopy) + { + if (minAgentVersion == null || CompareVersion(demand.Value, minAgentVersion.Value) > 0) + { + minAgentVersion = demand; + } + + demands.Remove(demand); + } + + return minAgentVersion; + } + + public static DemandMinimumVersion Max(IEnumerable demands) + { + DemandMinimumVersion minAgentVersion = null; + foreach (var demand in demands.Where(x => x.Name.Equals(PipelineConstants.AgentVersionDemandName, StringComparison.OrdinalIgnoreCase)).OfType()) + { + if (minAgentVersion == null || CompareVersion(demand.Value, minAgentVersion.Value) > 0) + { + minAgentVersion = demand; + } + } + + return minAgentVersion; + } + + public static Version ParseVersion(String versionString) + { + Version version = null; + if (!String.IsNullOrEmpty(versionString)) + { + int index = versionString.IndexOf('-'); + if (index > 0) + { + versionString = versionString.Substring(0, index); + } + + if (!Version.TryParse(versionString, out version)) + { + // If we couldn't parse it, set it back to null + version = null; + } + } + + return version; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentGroup.cs b/src/Sdk/DTWebApi/WebApi/DeploymentGroup.cs new file mode 100644 index 00000000000..29dc1008586 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentGroup.cs @@ -0,0 +1,83 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Deployment group. + /// + [DataContract] + public class DeploymentGroup : DeploymentGroupReference + { + /// + /// Number of deployment targets in the deployment group. + /// + [DataMember] + public Int32 MachineCount + { + get; + internal set; + } + + /// + /// List of deployment targets in the deployment group. + /// + public IList Machines + { + get + { + if (m_machines == null) + { + m_machines = new List(); + } + return m_machines; + } + internal set + { + m_machines = value; + } + } + + /// + /// Description of the deployment group. + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + /// + /// List of unique tags across all deployment targets in the deployment group. + /// + public IList MachineTags + { + get + { + if (m_tags == null) + { + m_tags = new List(); + } + return m_tags; + } + internal set + { + m_tags = value; + } + } + + /// + /// List of deployment targets in the deployment group. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "Machines")] + private IList m_machines; + + /// + /// List of unique tags across all deployment targets in the deployment group. 
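+        // Illustrative sketch of the demand expression syntax handled by Demand.TryParse above; the
+        // capability names and versions below are hypothetical:
+        //
+        //   Demand exists, equality, minVersion;
+        //   Demand.TryParse("node.js", out exists);                               // DemandExists
+        //   Demand.TryParse("sdk -equals 3.1", out equality);                     // DemandEquals
+        //   Demand.TryParse("Agent.Version -gtVersion 2.160.0", out minVersion);  // DemandMinimumVersion
+        //
+        //   var capabilities = new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase)
+        //   {
+        //       ["node.js"] = "12.13.0",
+        //       ["sdk"] = "3.1",
+        //       ["Agent.Version"] = "2.163.1-preview",   // the "-preview" suffix is ignored by ParseVersion
+        //   };
+        //
+        //   // Satisfied: the key exists, the values match, and 2.160.0 <= 2.163.1 respectively.
+        //   Boolean ok = exists.IsSatisfied(capabilities)
+        //             && equality.IsSatisfied(capabilities)
+        //             && minVersion.IsSatisfied(capabilities);                    // true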
+ /// + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "MachineTags")] + private IList m_tags; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentGroupActionFilter.cs b/src/Sdk/DTWebApi/WebApi/DeploymentGroupActionFilter.cs new file mode 100644 index 00000000000..dbd9d8bc639 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentGroupActionFilter.cs @@ -0,0 +1,31 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// This is useful in getting a list of deployment groups, filtered for which caller has permissions to take a particular action. + /// + [Flags] + [DataContract] + public enum DeploymentGroupActionFilter + { + /// + /// All deployment groups. + /// + [EnumMember] + None = 0, + + /// + /// Only deployment groups for which caller has **manage** permission. + /// + [EnumMember] + Manage = 2, + + /// + /// Only deployment groups for which caller has **use** permission. + /// + [EnumMember] + Use = 16, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentGroupCreateParameter.cs b/src/Sdk/DTWebApi/WebApi/DeploymentGroupCreateParameter.cs new file mode 100644 index 00000000000..61ecb420ce5 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentGroupCreateParameter.cs @@ -0,0 +1,72 @@ +using GitHub.Services.WebApi; +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Properties to create Deployment group. + /// + [DataContract] + public class DeploymentGroupCreateParameter + { + /// + /// Name of the deployment group. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Description of the deployment group. + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + /// + /// Identifier of the deployment pool in which deployment agents are registered. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 PoolId + { + get; + set; + } + + /// + /// Deployment pool in which deployment agents are registered. + /// This is obsolete. Kept for compatibility. Will be marked obsolete explicitly by M132. + /// + [DataMember(EmitDefaultValue = false)] + [ClientInternalUseOnly(OmitFromTypeScriptDeclareFile = false)] + public DeploymentGroupCreateParameterPoolProperty Pool + { + get; + set; + } + } + + /// + /// Properties of Deployment pool to create Deployment group. + /// + [DataContract] + public class DeploymentGroupCreateParameterPoolProperty + { + /// + /// Deployment pool identifier. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentGroupExpands.cs b/src/Sdk/DTWebApi/WebApi/DeploymentGroupExpands.cs new file mode 100644 index 00000000000..37ca5c1fefe --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentGroupExpands.cs @@ -0,0 +1,31 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Properties to be included or expanded in deployment group objects. This is useful when getting a single or list of deployment grouops. + /// + [Flags] + [DataContract] + public enum DeploymentGroupExpands + { + /// + /// No additional properties. + /// + [EnumMember] + None = 0, + + /// + /// Deprecated: Include all the deployment targets. + /// + [EnumMember] + Machines = 2, + + /// + /// Include unique list of tags across all deployment targets. 
+ /// + [EnumMember] + Tags = 4 + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentGroupMetrics.cs b/src/Sdk/DTWebApi/WebApi/DeploymentGroupMetrics.cs new file mode 100644 index 00000000000..0efd3438ea8 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentGroupMetrics.cs @@ -0,0 +1,62 @@ +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Deployment group metrics. + /// + [DataContract] + public sealed class DeploymentGroupMetrics + { + /// + /// Deployment group. + /// + [DataMember] + public DeploymentGroupReference DeploymentGroup + { + get; + internal set; + } + + /// + /// List of deployment group properties. And types of metrics provided for those properties. + /// + [DataMember] + public MetricsColumnsHeader ColumnsHeader + { + get; + internal set; + } + + /// + /// Values of properties and the metrics. + /// E.g. 1: total count of deployment targets for which 'TargetState' is 'offline'. + /// E.g. 2: Average time of deployment to the deployment targets for which 'LastJobStatus' is 'passed' and 'TargetState' is 'online'. + /// + public IList Rows + { + get + { + if (m_rows == null) + { + m_rows = new List(); + } + + return m_rows; + } + internal set + { + m_rows = value; + } + } + + /// + /// Values of properties and the metrics. + /// E.g. 1: total count of deployment targets for which 'TargetState' is 'offline'. + /// E.g. 2: Average time of deployment to the deployment targets for which 'LastJobStatus' is 'passed' and 'TargetState' is 'online'. + /// + [DataMember(Name = "Rows")] + private IList m_rows; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentGroupReference.cs b/src/Sdk/DTWebApi/WebApi/DeploymentGroupReference.cs new file mode 100644 index 00000000000..bdd34601fdc --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentGroupReference.cs @@ -0,0 +1,90 @@ +using System; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Deployment group reference. This is useful for referring a deployment group in another object. + /// + [DataContract] + public class DeploymentGroupReference + { + [JsonConstructor] + public DeploymentGroupReference() + { + } + + private DeploymentGroupReference(DeploymentGroupReference referenceToClone) + { + this.Id = referenceToClone.Id; + this.Name = referenceToClone.Name; + + if (referenceToClone.Project != null) + { + this.Project = new ProjectReference + { + Id = referenceToClone.Project.Id, + Name = referenceToClone.Project.Name, + }; + } + + if (referenceToClone.Pool != null) + { + this.Pool = new TaskAgentPoolReference + { + Id = referenceToClone.Pool.Id, + IsHosted = referenceToClone.Pool.IsHosted, + Name = referenceToClone.Pool.Name, + PoolType = referenceToClone.Pool.PoolType, + Scope = referenceToClone.Pool.Scope, + }; + } + } + + /// + /// Deployment group identifier. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + internal set; + } + + /// + /// Project to which the deployment group belongs. + /// + [DataMember(EmitDefaultValue = false)] + public ProjectReference Project + { + get; + internal set; + } + + /// + /// Name of the deployment group. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Deployment pool in which deployment agents are registered. 
+ /// + [DataMember(EmitDefaultValue = false)] + public TaskAgentPoolReference Pool + { + get; + set; + } + + public virtual DeploymentGroupReference Clone() + { + return new DeploymentGroupReference(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentGroupUpdateParameter.cs b/src/Sdk/DTWebApi/WebApi/DeploymentGroupUpdateParameter.cs new file mode 100644 index 00000000000..be382a96766 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentGroupUpdateParameter.cs @@ -0,0 +1,32 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Deployment group update parameter. + /// + [DataContract] + public class DeploymentGroupUpdateParameter + { + /// + /// Name of the deployment group. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Description of the deployment group. + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentMachine.cs b/src/Sdk/DTWebApi/WebApi/DeploymentMachine.cs new file mode 100644 index 00000000000..786e2394ae8 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentMachine.cs @@ -0,0 +1,99 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Deployment target. + /// + [DataContract] + public class DeploymentMachine : ICloneable + { + public DeploymentMachine() + { + } + + private DeploymentMachine(DeploymentMachine machineToBeCloned) + { + this.Id = machineToBeCloned.Id; + this.Tags = (Tags == null) ? null : new List(machineToBeCloned.Tags); + this.Agent = machineToBeCloned.Agent?.Clone(); + } + + /// + /// Deployment target Identifier. + /// + [DataMember] + public Int32 Id + { + get; + set; + } + + /// + /// Tags of the deployment target. + /// + public IList Tags + { + get + { + return m_tags; + } + set + { + m_tags = value; + } + } + + /// + /// Deployment agent. + /// + [DataMember(EmitDefaultValue = false)] + public TaskAgent Agent + { + get; + set; + } + + public PropertiesCollection Properties + { + get + { + if (m_properties == null) + { + m_properties = new PropertiesCollection(); + } + + return m_properties; + } + internal set + { + m_properties = value; + } + } + + object ICloneable.Clone() + { + return this.Clone(); + } + + public DeploymentMachine Clone() + { + return new DeploymentMachine(this); + } + + /// + /// Tags of the deployment target. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "Tags")] + private IList m_tags; + + /// + /// Properties of the deployment target. 
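(Illustrative aside, not part of the patch: a small sketch of the DeploymentMachine contract with placeholder values. Note that the private copy constructor above appears to test the new instance's own Tags, still null at that point, rather than machineToBeCloned.Tags, so tags would not survive Clone(); the sketch therefore re-sets them on the copy.)

using System;
using System.Collections.Generic;
using GitHub.DistributedTask.WebApi;

class DeploymentMachineSketch
{
    static void Main()
    {
        var machine = new DeploymentMachine
        {
            Id = 7,                                        // placeholder target id
            Tags = new List<string> { "web", "eu-west" },  // serialized under "Tags"
        };

        // Clone() goes through ICloneable and the private copy constructor.
        DeploymentMachine copy = machine.Clone();
        copy.Tags = new List<string>(machine.Tags);        // see note above about Clone and tags

        Console.WriteLine($"machine {copy.Id}: {string.Join(", ", copy.Tags)}");
    }
}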
+ /// + [DataMember(EmitDefaultValue = false, Name = "Properties")] + private PropertiesCollection m_properties; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentMachineExpands.cs b/src/Sdk/DTWebApi/WebApi/DeploymentMachineExpands.cs new file mode 100644 index 00000000000..732fac64a22 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentMachineExpands.cs @@ -0,0 +1,19 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [Flags] + [DataContract] + public enum DeploymentMachineExpands + { + [EnumMember] + None = 0, + + [EnumMember] + Capabilities = 2, + + [EnumMember] + AssignedRequest = 4 + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentMachineGroup.cs b/src/Sdk/DTWebApi/WebApi/DeploymentMachineGroup.cs new file mode 100644 index 00000000000..5db57253beb --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentMachineGroup.cs @@ -0,0 +1,38 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class DeploymentMachineGroup : DeploymentMachineGroupReference + { + [DataMember] + public Int32 Size + { + get; + internal set; + } + + public IList Machines + { + get + { + if (m_machines == null) + { + m_machines = new List(); + } + + return m_machines; + } + + internal set + { + m_machines = value; + } + } + + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "Machines")] + private IList m_machines; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentMachineGroupReference.cs b/src/Sdk/DTWebApi/WebApi/DeploymentMachineGroupReference.cs new file mode 100644 index 00000000000..8b1b05673da --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentMachineGroupReference.cs @@ -0,0 +1,37 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class DeploymentMachineGroupReference + { + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + internal set; + } + + [DataMember(EmitDefaultValue = false)] + public ProjectReference Project + { + get; + internal set; + } + + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TaskAgentPoolReference Pool + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentPoolSummary.cs b/src/Sdk/DTWebApi/WebApi/DeploymentPoolSummary.cs new file mode 100644 index 00000000000..4b2cdaab7bc --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentPoolSummary.cs @@ -0,0 +1,78 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Deployment pool summary. + /// + [DataContract] + public sealed class DeploymentPoolSummary + { + /// + /// Deployment pool. + /// + [DataMember] + public TaskAgentPoolReference Pool + { + get; + internal set; + } + + /// + /// Number of deployment agents that are online. + /// + [DataMember] + public Int32 OnlineAgentsCount + { + get; + internal set; + } + + /// + /// Number of deployment agents that are offline. + /// + [DataMember] + public Int32 OfflineAgentsCount + { + get; + internal set; + } + + /// + /// Virtual machine Resource referring in pool. + /// + [DataMember] + public EnvironmentResourceReference Resource + { + get; + internal set; + } + + /// + /// List of deployment groups referring to the deployment pool. 
+ /// + public IList DeploymentGroups + { + get + { + if (m_deploymentGroups == null) + { + m_deploymentGroups = new List(); + } + return m_deploymentGroups; + } + internal set + { + m_deploymentGroups = value; + } + } + + /// + /// List of deployment groups referring to the deployment pool. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "DeploymentGroups")] + private IList m_deploymentGroups; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentPoolSummaryExpands.cs b/src/Sdk/DTWebApi/WebApi/DeploymentPoolSummaryExpands.cs new file mode 100644 index 00000000000..63586be3c39 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentPoolSummaryExpands.cs @@ -0,0 +1,31 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Properties to be included or expanded in deployment pool summary objects. This is useful when getting a single or list of deployment pool summaries. + /// + [Flags] + [DataContract] + public enum DeploymentPoolSummaryExpands + { + /// + /// No additional properties + /// + [EnumMember] + None = 0, + + /// + /// Include deployment groups referring to the deployment pool. + /// + [EnumMember] + DeploymentGroups = 2, + + /// + /// Include Resource referring to the deployment pool. + /// + [EnumMember] + Resource = 4 + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentTargetExpands.cs b/src/Sdk/DTWebApi/WebApi/DeploymentTargetExpands.cs new file mode 100644 index 00000000000..a1b3e371e36 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentTargetExpands.cs @@ -0,0 +1,37 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Properties to be included or expanded in deployment target objects. This is useful when getting a single or list of deployment targets. + /// + [Flags] + [DataContract] + public enum DeploymentTargetExpands + { + /// + /// No additional properties. + /// + [EnumMember] + None = 0, + + /// + /// Include capabilities of the deployment agent. + /// + [EnumMember] + Capabilities = 2, + + /// + /// Include the job request assigned to the deployment agent. + /// + [EnumMember] + AssignedRequest = 4, + + /// + /// Include the last completed job request of the deployment agent. + /// + [EnumMember] + LastCompletedRequest = 8 + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DeploymentTargetUpdateParameter.cs b/src/Sdk/DTWebApi/WebApi/DeploymentTargetUpdateParameter.cs new file mode 100644 index 00000000000..8f2a4835fbe --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DeploymentTargetUpdateParameter.cs @@ -0,0 +1,41 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Deployment target update parameter. + /// + [DataContract] + public class DeploymentTargetUpdateParameter + { + /// + /// Identifier of the deployment target. + /// + [DataMember] + public Int32 Id + { + get; + set; + } + + /// + /// Tags of the deployment target.. 
+ /// + public IList Tags + { + get + { + return m_tags; + } + set + { + m_tags = value; + } + } + + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "Tags")] + private IList m_tags; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/DiagnosticLogMetadata.cs b/src/Sdk/DTWebApi/WebApi/DiagnosticLogMetadata.cs new file mode 100644 index 00000000000..42985223542 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/DiagnosticLogMetadata.cs @@ -0,0 +1,36 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class DiagnosticLogMetadata + { + public DiagnosticLogMetadata(string agentName, int agentId, int poolId, string phaseName, string fileName, string phaseResult) + { + AgentName = agentName; + AgentId = agentId; + PoolId = poolId; + PhaseName = phaseName; + FileName = fileName; + PhaseResult = phaseResult; + } + + [DataMember] + public string AgentName { get; set; } + + [DataMember] + public int AgentId { get; set; } + + [DataMember] + public int PoolId { get; set; } + + [DataMember] + public string PhaseName { get; set; } + + [DataMember] + public string FileName { get; set; } + + [DataMember] + public string PhaseResult { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/EnableAccessTokenType.cs b/src/Sdk/DTWebApi/WebApi/EnableAccessTokenType.cs new file mode 100644 index 00000000000..e1a8f691386 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/EnableAccessTokenType.cs @@ -0,0 +1,10 @@ +namespace GitHub.DistributedTask.WebApi +{ + public enum EnableAccessTokenType + { + None, + Variable, + True = Variable, + SecretVariable, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentCreateParameter.cs b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentCreateParameter.cs new file mode 100644 index 00000000000..35085a3962f --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentCreateParameter.cs @@ -0,0 +1,35 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Properties to create Environment. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class EnvironmentCreateParameter + { + /// + /// Name of the environment. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Description of the environment. + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentDeploymentExecutionRecord.cs b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentDeploymentExecutionRecord.cs new file mode 100644 index 00000000000..b37da3949b6 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentDeploymentExecutionRecord.cs @@ -0,0 +1,192 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// EnvironmentDeploymentExecutionRecord. 
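(Illustrative aside, not part of the patch: DiagnosticLogMetadata is the one contract in this group that is constructor-initialized rather than settable member by member. A sketch with placeholder values only.)

using System;
using GitHub.DistributedTask.WebApi;

class DiagnosticLogMetadataSketch
{
    static void Main()
    {
        // All six values are placeholders describing one phase of a finished job.
        var metadata = new DiagnosticLogMetadata(
            agentName: "runner-01",
            agentId: 12,
            poolId: 3,
            phaseName: "Build",
            fileName: "runner-diag.zip",
            phaseResult: "succeeded");

        Console.WriteLine($"{metadata.AgentName} ({metadata.PoolId}/{metadata.AgentId}): {metadata.FileName}");
    }
}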
+ /// + [DataContract] + public class EnvironmentDeploymentExecutionRecord + { + /// + /// Id of the Environment deployment execution history record + /// + [DataMember] + public Int64 Id + { + get; + set; + } + + /// + /// Request identifier of the Environment deployment execution history record + /// + [DataMember] + public String RequestIdentifier + { + get; + set; + } + + /// + /// Id of the Environment + /// + [DataMember] + public Int32 EnvironmentId + { + get; + set; + } + + /// + /// Service owner Id + /// + [DataMember(EmitDefaultValue = false)] + public Guid ServiceOwner + { + get; + set; + } + + /// + /// Project Id + /// + [DataMember(EmitDefaultValue = false)] + public Guid ScopeId + { + get; + set; + } + + /// + /// Resource Id + /// + [DataMember(EmitDefaultValue = false)] + public Int32? ResourceId + { + get; + set; + } + + /// + /// Plan type of the environment deployment execution record + /// + [DataMember(EmitDefaultValue = false)] + public String PlanType + { + get; + set; + } + + /// + /// Plan Id + /// + [DataMember(EmitDefaultValue = false)] + public Guid PlanId + { + get; + set; + } + + /// + /// Stage name + /// + [DataMember(EmitDefaultValue = false)] + public String StageName + { + get; + set; + } + + /// + /// Job name + /// + [DataMember(EmitDefaultValue = false)] + public String JobName + { + get; + set; + } + + /// + /// Stage Attempt + /// + [DataMember(EmitDefaultValue = false)] + public Int32 StageAttempt + { + get; + set; + } + + /// + /// Job Attempt + /// + [DataMember(EmitDefaultValue = false)] + public Int32 JobAttempt + { + get; + set; + } + + /// + /// Definition of the environment deployment execution owner + /// + [DataMember(EmitDefaultValue = false)] + public TaskOrchestrationOwner Definition + { + get; + set; + } + + /// + /// Owner of the environment deployment execution record + /// + [DataMember(EmitDefaultValue = false)] + public TaskOrchestrationOwner Owner + { + get; + set; + } + + /// + /// Result of the environment deployment execution + /// + [DataMember(EmitDefaultValue = false)] + public TaskResult? Result + { + get; + set; + } + + /// + /// Queue time of the environment deployment execution + /// + [DataMember(EmitDefaultValue = false)] + public DateTime QueueTime + { + get; + set; + } + + /// + /// Start time of the environment deployment execution + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? StartTime + { + get; + set; + } + + /// + /// Finish time of the environment deployment execution + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? FinishTime + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentExpands.cs b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentExpands.cs new file mode 100644 index 00000000000..e7bf1e79578 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentExpands.cs @@ -0,0 +1,25 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Properties to be included or expanded in environment objects. This is useful when getting a single environment. + /// + [Flags] + [DataContract] + public enum EnvironmentExpands + { + /// + /// No additional properties + /// + [EnumMember] + None = 0, + + /// + /// Include resource references referring to the environment. 
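(Illustrative aside, not part of the patch: a sketch of populating the execution record above with placeholder values. It assumes the TaskResult enum referenced by the Result property exposes its usual Succeeded value.)

using System;
using GitHub.DistributedTask.WebApi;

class EnvironmentDeploymentRecordSketch
{
    static void Main()
    {
        // Placeholder values for a single deployment job recorded against an environment.
        var record = new EnvironmentDeploymentExecutionRecord
        {
            EnvironmentId = 5,
            PlanType = "Build",
            PlanId = Guid.NewGuid(),
            StageName = "Production",
            JobName = "Deploy",
            StageAttempt = 1,
            JobAttempt = 1,
            Result = TaskResult.Succeeded,
            QueueTime = DateTime.UtcNow,
        };

        Console.WriteLine($"{record.StageName}/{record.JobName} attempt {record.JobAttempt}: {record.Result}");
    }
}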
+ /// + [EnumMember] + ResourceReferences = 1 + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentInstance.cs b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentInstance.cs new file mode 100644 index 00000000000..6fb0710049b --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentInstance.cs @@ -0,0 +1,116 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Environment. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class EnvironmentInstance + { + /// + /// Id of the Environment + /// + [DataMember] + public Int32 Id + { + get; + set; + } + + /// + /// Name of the Environment. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Description of the Environment. + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + /// + /// Identity reference of the user who created the Environment. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef CreatedBy + { + get; + set; + } + + /// + /// Creation time of the Environment + /// + [DataMember] + public DateTime CreatedOn + { + get; + set; + } + + /// + /// Identity reference of the user who last modified the Environment. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef LastModifiedBy + { + get; + set; + } + + /// + /// Last modified time of the Environment + /// + [DataMember] + public DateTime LastModifiedOn + { + get; + set; + } + + /// + /// List of resources + /// + public IList Resources + { + get + { + if (this.resources == null) + { + this.resources = new List(); + } + + return this.resources; + } + } + + /// + /// Resources that defined or used for this environment. + /// We use this for deployment job's resource authorization. + /// + public Pipelines.PipelineResources ReferencedResources + { + get; + set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "Resources")] + private IList resources; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentLinkedResourceReference.cs b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentLinkedResourceReference.cs new file mode 100644 index 00000000000..ddc8ffedbb3 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentLinkedResourceReference.cs @@ -0,0 +1,34 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// EnvironmentLinkedResourceReference. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class EnvironmentLinkedResourceReference + { + /// + /// Id of the resource. + /// + [DataMember] + public String Id + { + get; + set; + } + + /// + /// Type of resource. 
+ /// + [DataMember(EmitDefaultValue = false)] + public String TypeName + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentReference.cs b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentReference.cs new file mode 100644 index 00000000000..75b2012c48b --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentReference.cs @@ -0,0 +1,18 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + + +namespace GitHub.DistributedTask.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class EnvironmentReference + { + [DataMember] + public Int32 Id { get; set; } + + [DataMember] + public String Name { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentResource.cs b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentResource.cs new file mode 100644 index 00000000000..89d9f2c9441 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentResource.cs @@ -0,0 +1,51 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public abstract class EnvironmentResource + { + [DataMember] + public Int32 Id { get; set; } + + [DataMember] + public String Name { get; set; } + + /// + /// Environment resource type + /// + [DataMember] + public EnvironmentResourceType Type { get; set; } + + [DataMember] + public IdentityRef CreatedBy { get; set; } + + [DataMember] + public DateTime CreatedOn { get; set; } + + [DataMember] + public IdentityRef LastModifiedBy { get; set; } + + [DataMember] + public DateTime LastModifiedOn { get; set; } + + [DataMember] + public EnvironmentReference EnvironmentReference { get; set; } + + protected EnvironmentResource() + { + Name = string.Empty; + + CreatedBy = new IdentityRef(); + + LastModifiedBy = new IdentityRef(); + + this.EnvironmentReference = new EnvironmentReference(); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentResourceReference.cs b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentResourceReference.cs new file mode 100644 index 00000000000..d8026696458 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentResourceReference.cs @@ -0,0 +1,63 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// EnvironmentResourceReference. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class EnvironmentResourceReference + { + /// + /// Id of the resource. + /// + [DataMember] + public Int32 Id + { + get; + set; + } + + /// + /// Name of the resource. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Type of the resource. 
+ /// + [DataMember(EmitDefaultValue = false)] + public EnvironmentResourceType Type + { + get; + set; + } + + /// + /// List of linked resources + /// + public IList LinkedResources + { + get + { + if (m_linkedResources == null) + { + m_linkedResources = new List(); + } + + return m_linkedResources; + } + } + + private IList m_linkedResources; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentResourceType.cs b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentResourceType.cs new file mode 100644 index 00000000000..87d0be73dec --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentResourceType.cs @@ -0,0 +1,31 @@ +using System; +using System.ComponentModel; + + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// EnvironmentResourceType. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [Flags] + public enum EnvironmentResourceType + { + Undefined = 0, + + /// + /// Unknown resource type + /// + Generic = 1, + + /// + /// Virtual machine resource type + /// + VirtualMachine = 2, + + /// + /// Kubernetes resource type + /// + Kubernetes = 4 + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentUpdateParameter.cs b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentUpdateParameter.cs new file mode 100644 index 00000000000..6fdf9fab855 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/EnvironmentUpdateParameter.cs @@ -0,0 +1,34 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Properties to update Environment. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class EnvironmentUpdateParameter + { + /// + /// Name of the environment. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Description of the environment. 
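(Illustrative aside, not part of the patch: EnvironmentInstance creates its Resources list lazily in the getter, so callers can populate it directly. A sketch with placeholder ids and names, using the resource reference and resource type defined above.)

using System;
using GitHub.DistributedTask.WebApi;

class EnvironmentResourceSketch
{
    static void Main()
    {
        var environment = new EnvironmentInstance
        {
            Id = 11,
            Name = "production",
            Description = "Customer-facing environment",
        };

        // Resources is lazily instantiated by its getter; add a Kubernetes-typed reference.
        environment.Resources.Add(new EnvironmentResourceReference
        {
            Id = 101,
            Name = "primary-cluster",
            Type = EnvironmentResourceType.Kubernetes,
        });

        Console.WriteLine($"{environment.Name}: {environment.Resources.Count} resource(s)");
    }
}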
+ /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/KubernetesResource.cs b/src/Sdk/DTWebApi/WebApi/Environment/KubernetesResource.cs new file mode 100644 index 00000000000..fc0cfee9c08 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/KubernetesResource.cs @@ -0,0 +1,20 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class KubernetesResource : EnvironmentResource + { + [DataMember] + public String Namespace { get; set; } + + [DataMember] + public String ClusterName { get; set; } + + [DataMember] + public Guid ServiceEndpointId { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/KubernetesResourceCreateParameters.cs b/src/Sdk/DTWebApi/WebApi/Environment/KubernetesResourceCreateParameters.cs new file mode 100644 index 00000000000..c400258c70a --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/KubernetesResourceCreateParameters.cs @@ -0,0 +1,23 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class KubernetesResourceCreateParameters + { + [DataMember] + public String Name { get; set; } + + [DataMember] + public String Namespace { get; set; } + + [DataMember] + public String ClusterName { get; set; } + + [DataMember] + public Guid ServiceEndpointId { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/VirtualMachine.cs b/src/Sdk/DTWebApi/WebApi/Environment/VirtualMachine.cs new file mode 100644 index 00000000000..242096ecefc --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/VirtualMachine.cs @@ -0,0 +1,41 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class VirtualMachine + { + [DataMember] + public Int32 Id { get; set; } + + [DataMember] + public TaskAgent Agent { get; set; } + + /// + /// List of tags + /// + public IList Tags + { + get + { + if (this.tags == null) + { + this.tags = new List(); + } + + return this.tags; + } + set + { + this.tags = value; + } + } + + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "Tags")] + private IList tags; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/VirtualMachineGroup.cs b/src/Sdk/DTWebApi/WebApi/Environment/VirtualMachineGroup.cs new file mode 100644 index 00000000000..040b7a7e0cf --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/VirtualMachineGroup.cs @@ -0,0 +1,14 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class VirtualMachineGroup : EnvironmentResource + { + [DataMember] + public Int32 PoolId { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Environment/VirtualMachineGroupCreateParameters.cs b/src/Sdk/DTWebApi/WebApi/Environment/VirtualMachineGroupCreateParameters.cs new file mode 100644 index 00000000000..7a3bee5c158 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Environment/VirtualMachineGroupCreateParameters.cs @@ -0,0 +1,14 @@ +using System; +using System.ComponentModel; +using 
System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class VirtualMachineGroupCreateParameters + { + [DataMember] + public String Name { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Exceptions.cs b/src/Sdk/DTWebApi/WebApi/Exceptions.cs new file mode 100644 index 00000000000..c3615da7a6b --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Exceptions.cs @@ -0,0 +1,2461 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.WebApi +{ + [Serializable] + [ExceptionMapping("0.0", "3.0", "DistributedTaskException", "GitHub.DistributedTask.WebApi.DistributedTaskException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DistributedTaskException : VssServiceException + { + public DistributedTaskException(String message) + : base(message) + { + } + + public DistributedTaskException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected DistributedTaskException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class ServerJobFailureException : DistributedTaskException + { + public ServerJobFailureException(String message) + : base(message) + { + } + + public ServerJobFailureException(String message, Exception ex) + : base(message, ex) + { + } + + protected ServerJobFailureException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class InvalidTaskExecutionModeTypeException : DistributedTaskException + { + public InvalidTaskExecutionModeTypeException(String message) + : base(message) + { + } + + public InvalidTaskExecutionModeTypeException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidTaskExecutionModeTypeException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class ServerExecutionHandlerNotFoundException : DistributedTaskException + { + public ServerExecutionHandlerNotFoundException(String message) + : base(message) + { + } + + public ServerExecutionHandlerNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected ServerExecutionHandlerNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskJsonNotFoundException", "GitHub.DistributedTask.WebApi.TaskJsonNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidBuildContributionsTarget : DistributedTaskException + { + public InvalidBuildContributionsTarget(String message) + : base(message) + { + } + + public InvalidBuildContributionsTarget(String message, Exception ex) + : base(message, ex) + { + } + protected InvalidBuildContributionsTarget(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "SecurityException", "GitHub.DistributedTask.WebApi.SecurityException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class SecurityException : DistributedTaskException + { + public SecurityException(String message) + : base(message) + { + } + + public SecurityException(String message, Exception 
innerException) + : base(message, innerException) + { + } + + protected SecurityException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "AccessDeniedException", "GitHub.DistributedTask.WebApi.AccessDeniedException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class AccessDeniedException : SecurityException + { + public AccessDeniedException(String message) + : base(message) + { + } + + public AccessDeniedException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected AccessDeniedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "DataNotFoundException", "GitHub.DistributedTask.WebApi.DataNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DataNotFoundException : DistributedTaskException + { + public DataNotFoundException(String message) + : base(message) + { + } + + public DataNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected DataNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "DataSourceNotFoundException", "GitHub.DistributedTask.WebApi.DataSourceNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DataSourceNotFoundException : DistributedTaskException + { + public DataSourceNotFoundException(string message) : base(message) + { + } + + public DataSourceNotFoundException(string message, Exception innerException) : base(message, innerException) + { + } + + protected DataSourceNotFoundException(SerializationInfo info, StreamingContext context) : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidDataSourceBindingException", "GitHub.DistributedTask.WebApi.InvalidDataSourceBindingException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidDataSourceBindingException : DistributedTaskException + { + public InvalidDataSourceBindingException(string message) : base(message) + { + } + + public InvalidDataSourceBindingException(string message, Exception innerException) : base(message, innerException) + { + } + + protected InvalidDataSourceBindingException(SerializationInfo info, StreamingContext context) : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidServiceEndpointRequestException", "GitHub.DistributedTask.WebApi.InvalidServiceEndpointRequestException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidServiceEndpointRequestException : DistributedTaskException + { + public InvalidServiceEndpointRequestException(string message) : base(message) + { + } + + public InvalidServiceEndpointRequestException(string message, Exception innerException) : base(message, innerException) + { + } + + protected InvalidServiceEndpointRequestException(SerializationInfo info, StreamingContext context) : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "EndpointNotFoundException", "GitHub.DistributedTask.WebApi.EndpointNotFoundException, 
GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class EndpointNotFoundException : DistributedTaskException + { + public EndpointNotFoundException(String message) + : base(message) + { + } + + public EndpointNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private EndpointNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class InvalidJsonPathResponseSelectorException : DistributedTaskException + { + public InvalidJsonPathResponseSelectorException(string message) + : base(message) + { + } + + public InvalidJsonPathResponseSelectorException(string message, Exception ex) + : base(message, ex) + { + } + + protected InvalidJsonPathResponseSelectorException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class InvalidPackageQueryException : DistributedTaskException + { + public InvalidPackageQueryException(string message) + : base(message) + { + } + + public InvalidPackageQueryException(string message, Exception ex) + : base(message, ex) + { + } + + protected InvalidPackageQueryException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidAuthorizationDetailsException", "GitHub.DistributedTask.WebApi.InvalidAuthorizationDetailsException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidAuthorizationDetailsException : DistributedTaskException + { + public InvalidAuthorizationDetailsException(string message) : base(message) + { + } + + public InvalidAuthorizationDetailsException(string message, Exception innerException) : base(message, innerException) + { + } + + protected InvalidAuthorizationDetailsException(SerializationInfo info, StreamingContext context) : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidEndpointResponseException", "GitHub.DistributedTask.WebApi.InvalidEndpointResponseException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidEndpointResponseException : DistributedTaskException + { + public InvalidEndpointResponseException(string message) : base(message) + { + } + + public InvalidEndpointResponseException(string message, Exception innerException) : base(message, innerException) + { + } + + protected InvalidEndpointResponseException(SerializationInfo info, StreamingContext context) : base(info, context) + { + } + } + + [Serializable] + public class InvalidTaskAgentPoolException : DistributedTaskException + { + public InvalidTaskAgentPoolException(string message) : base(message) + { + } + + public InvalidTaskAgentPoolException(string message, Exception innerException) : base(message, innerException) + { + } + + protected InvalidTaskAgentPoolException(SerializationInfo info, StreamingContext context) : base(info, context) + { + } + } + + [Serializable] + public class InvalidDeploymentMachineException : DistributedTaskException + { + public InvalidDeploymentMachineException(string message) : base(message) + { + } + + public InvalidDeploymentMachineException(string message, Exception innerException) : base(message, innerException) + { + } + + protected InvalidDeploymentMachineException(SerializationInfo info, StreamingContext 
context) : base(info, context) + { + } + } + + [Serializable] + public sealed class MetaTaskDefinitionExistsException : DistributedTaskException + { + public MetaTaskDefinitionExistsException(String message) + : base(message) + { + } + + public MetaTaskDefinitionExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private MetaTaskDefinitionExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class TaskGroupDraftExistsException : DistributedTaskException + { + public TaskGroupDraftExistsException(String message) + : base(message) + { + } + + public TaskGroupDraftExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskGroupDraftExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class TaskGroupPreviewExistsException : DistributedTaskException + { + public TaskGroupPreviewExistsException(String message) + : base(message) + { + } + + public TaskGroupPreviewExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskGroupPreviewExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class TaskGroupDisabledException : DistributedTaskException + { + public TaskGroupDisabledException(String message) + : base(message) + { + } + + public TaskGroupDisabledException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskGroupDisabledException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class TaskGroupIdConflictException : DistributedTaskException + { + public TaskGroupIdConflictException(String message) + : base(message) + { + } + + public TaskGroupIdConflictException(String message, Exception ex) + : base(message, ex) + { + } + + protected TaskGroupIdConflictException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class MetaTaskDefinitionNotFoundException : DistributedTaskException + { + public MetaTaskDefinitionNotFoundException(String message) + : base(message) + { + } + + public MetaTaskDefinitionNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private MetaTaskDefinitionNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class MetaTaskDefinitionRunsOnMismatchException : DistributedTaskException + { + public MetaTaskDefinitionRunsOnMismatchException(String message) + : base(message) + { + } + + public MetaTaskDefinitionRunsOnMismatchException(String message, Exception innerException) + : base(message, innerException) + { + } + + private MetaTaskDefinitionRunsOnMismatchException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class InvalidTaskDefinitionTypeException : DistributedTaskException + { + public InvalidTaskDefinitionTypeException(String message) + : base(message) + { + } + + public InvalidTaskDefinitionTypeException(String message, Exception innerException) + : base(message, innerException) + { + } + + private InvalidTaskDefinitionTypeException(SerializationInfo info, 
StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class PackageExistsException : DistributedTaskException + { + public PackageExistsException(string message) + : base(message) + { + } + + public PackageExistsException(string message, Exception ex) + : base(message, ex) + { + } + + protected PackageExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "PackageNotFoundException", "GitHub.DistributedTask.WebApi.PackageNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class PackageNotFoundException : DistributedTaskException + { + public PackageNotFoundException(String message) + : base(message) + { + } + + public PackageNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private PackageNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "PackagePropertyUnknownException", "GitHub.DistributedTask.WebApi.PackagePropertyUnknownException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class PackagePropertyUnknownException : DistributedTaskException + { + public PackagePropertyUnknownException(String message) + : base(message) + { + } + + public PackagePropertyUnknownException(String message, Exception innerException) + : base(message, innerException) + { + } + + private PackagePropertyUnknownException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class PackageVersionInvalidException : DistributedTaskException + { + public PackageVersionInvalidException(string message) + : base(message) + { + } + + public PackageVersionInvalidException(string message, Exception ex) + : base(message, ex) + { + } + + protected PackageVersionInvalidException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class ServiceEndpointException : DistributedTaskException + { + public ServiceEndpointException(string message) : base(message) + { + } + + public ServiceEndpointException(string message, Exception innerException) : base(message, innerException) + { + } + + protected ServiceEndpointException(SerializationInfo info, StreamingContext context) : base(info, context) + { + } + } + + [Serializable] + public class OAuthConfigurationException : DistributedTaskException + { + public OAuthConfigurationException(string message) : base(message) + { + } + + public OAuthConfigurationException(string message, Exception innerException) : base(message, innerException) + { + } + + protected OAuthConfigurationException(SerializationInfo info, StreamingContext context) : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "ServiceEndpointNotFoundException", "GitHub.DistributedTask.WebApi.ServiceEndpointNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ServiceEndpointNotFoundException : DistributedTaskException + { + public ServiceEndpointNotFoundException(String message) + : base(message) + { + } + + public ServiceEndpointNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected 
ServiceEndpointNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "ServiceEndpointQueryFailedException", "GitHub.DistributedTask.WebApi.ServiceEndpointQueryFailedException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ServiceEndpointQueryFailedException : DistributedTaskException + { + public ServiceEndpointQueryFailedException(String message) + : base(message) + { + } + + public ServiceEndpointQueryFailedException(String message, Exception ex) + : base(message, ex) + { + } + + protected ServiceEndpointQueryFailedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class ServiceEndpointUntrustedHostException : DistributedTaskException + { + public ServiceEndpointUntrustedHostException(string message) + : base(message) + { + } + + public ServiceEndpointUntrustedHostException(string message, Exception ex) + : base(message, ex) + { + } + + protected ServiceEndpointUntrustedHostException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentExistsException", "GitHub.DistributedTask.WebApi.TaskAgentExistsException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class TaskAgentExistsException : DistributedTaskException + { + public TaskAgentExistsException(String message) + : base(message) + { + } + + public TaskAgentExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected TaskAgentExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentJobFailedNotEnoughSubscriptionResourcesException", "GitHub.DistributedTask.WebApi.TaskAgentJobFailedNotEnoughSubscriptionResourcesException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentJobFailedNotEnoughSubscriptionResourcesException : DistributedTaskException + { + public TaskAgentJobFailedNotEnoughSubscriptionResourcesException(String message) + : base(message) + { + } + + public TaskAgentJobFailedNotEnoughSubscriptionResourcesException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentJobFailedNotEnoughSubscriptionResourcesException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentJobNotFoundException", "GitHub.DistributedTask.WebApi.TaskAgentJobNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentJobNotFoundException : DistributedTaskException + { + public TaskAgentJobNotFoundException(String message) + : base(message) + { + } + + public TaskAgentJobNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentJobNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentJobStillRunningException", "GitHub.DistributedTask.WebApi.TaskAgentJobStillRunningException, GitHub.DistributedTask.WebApi, 
Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentJobStillRunningException : DistributedTaskException + { + public TaskAgentJobStillRunningException(String message) + : base(message) + { + } + + public TaskAgentJobStillRunningException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentJobStillRunningException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentJobTokenExpiredException", "GitHub.DistributedTask.WebApi.TaskAgentJobTokenExpiredException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentJobTokenExpiredException : DistributedTaskException + { + public TaskAgentJobTokenExpiredException(String message) + : base(message) + { + } + + public TaskAgentJobTokenExpiredException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentJobTokenExpiredException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentNotFoundException", "GitHub.DistributedTask.WebApi.TaskAgentNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentNotFoundException : DistributedTaskException + { + public TaskAgentNotFoundException(String message) + : base(message) + { + } + + public TaskAgentNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentVersionNotSupportedException", "GitHub.DistributedTask.WebApi.TaskAgentVersionNotSupportedException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentVersionNotSupportedException : DistributedTaskException + { + public TaskAgentVersionNotSupportedException(String message) + : base(message) + { + } + + public TaskAgentVersionNotSupportedException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentVersionNotSupportedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentPoolExistsException", "GitHub.DistributedTask.WebApi.TaskAgentPoolExistsException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class TaskAgentPoolExistsException : DistributedTaskException + { + public TaskAgentPoolExistsException(String message) + : base(message) + { + } + + public TaskAgentPoolExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected TaskAgentPoolExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentPoolMaintenanceDefinitionNotFoundException", "GitHub.DistributedTask.WebApi.TaskAgentPoolMaintenanceDefinitionNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class 
TaskAgentPoolMaintenanceDefinitionNotFoundException : DistributedTaskException + { + public TaskAgentPoolMaintenanceDefinitionNotFoundException(String message) + : base(message) + { + } + + public TaskAgentPoolMaintenanceDefinitionNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentPoolMaintenanceDefinitionNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentPoolMaintenanceJobNotFoundException", "GitHub.DistributedTask.WebApi.TaskAgentPoolMaintenanceJobNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentPoolMaintenanceJobNotFoundException : DistributedTaskException + { + public TaskAgentPoolMaintenanceJobNotFoundException(String message) + : base(message) + { + } + + public TaskAgentPoolMaintenanceJobNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentPoolMaintenanceJobNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentPoolMaintenanceNotEnabledException", "GitHub.DistributedTask.WebApi.TaskAgentPoolMaintenanceNotEnabledException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentPoolMaintenanceNotEnabledException : DistributedTaskException + { + public TaskAgentPoolMaintenanceNotEnabledException(String message) + : base(message) + { + } + + public TaskAgentPoolMaintenanceNotEnabledException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentPoolMaintenanceNotEnabledException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentPendingUpdateExistsException", "GitHub.DistributedTask.WebApi.TaskAgentPendingUpdateExistsException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentPendingUpdateExistsException : DistributedTaskException + { + public TaskAgentPendingUpdateExistsException(String message) + : base(message) + { + } + + public TaskAgentPendingUpdateExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentPendingUpdateExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentPendingUpdateNotFoundException", "GitHub.DistributedTask.WebApi.TaskAgentPendingUpdateNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentPendingUpdateNotFoundException : DistributedTaskException + { + public TaskAgentPendingUpdateNotFoundException(String message) + : base(message) + { + } + + public TaskAgentPendingUpdateNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentPendingUpdateNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentPoolNotFoundException", 
"GitHub.DistributedTask.WebApi.TaskAgentPoolNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentPoolNotFoundException : DistributedTaskException + { + public TaskAgentPoolNotFoundException(String message) + : base(message) + { + } + + public TaskAgentPoolNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentPoolNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class TaskAgentPoolRemovedException : DistributedTaskException + { + public TaskAgentPoolRemovedException(String message) + : base(message) + { + } + + public TaskAgentPoolRemovedException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentPoolRemovedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class TaskAgentPoolTypeMismatchException : DistributedTaskException + { + public TaskAgentPoolTypeMismatchException(String message) + : base(message) + { + } + + public TaskAgentPoolTypeMismatchException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentPoolTypeMismatchException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class DeploymentPoolInUseException : DistributedTaskException + { + public DeploymentPoolInUseException(String message) + : base(message) + { + } + + public DeploymentPoolInUseException(String message, Exception innerException) + : base(message, innerException) + { + } + + private DeploymentPoolInUseException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentQueueExistsException", "GitHub.DistributedTask.WebApi.TaskAgentQueueExistsException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentQueueExistsException : DistributedTaskException + { + public TaskAgentQueueExistsException(String message) + : base(message) + { + } + + public TaskAgentQueueExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentQueueExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentQueueNotFoundException", "GitHub.DistributedTask.WebApi.TaskAgentQueueNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentQueueNotFoundException : DistributedTaskException + { + public TaskAgentQueueNotFoundException(String message) + : base(message) + { + } + + public TaskAgentQueueNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentQueueNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class DeploymentMachineGroupExistsException : DistributedTaskException + { + public DeploymentMachineGroupExistsException(String message) + : base(message) + { + } + + public DeploymentMachineGroupExistsException(String message, Exception innerException) + : 
base(message, innerException) + { + } + + private DeploymentMachineGroupExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class DeploymentGroupException : DistributedTaskException + { + public DeploymentGroupException(string message) : base(message) + { + } + + public DeploymentGroupException(string message, Exception innerException) : base(message, innerException) + { + } + + protected DeploymentGroupException(SerializationInfo info, StreamingContext context) : base(info, context) + { + } + } + + [Serializable] + public sealed class DeploymentGroupExistsException : DistributedTaskException + { + public DeploymentGroupExistsException(String message) + : base(message) + { + } + + public DeploymentGroupExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private DeploymentGroupExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class DeploymentMachineExistsException : DistributedTaskException + { + public DeploymentMachineExistsException(String message) + : base(message) + { + } + + public DeploymentMachineExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private DeploymentMachineExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class RESTEndpointNotSupportedException : DistributedTaskException + { + public RESTEndpointNotSupportedException(String message) + : base(message) + { + } + + public RESTEndpointNotSupportedException(String message, Exception innerException) + : base(message, innerException) + { + } + + private RESTEndpointNotSupportedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class DeploymentMachineGroupNotFoundException : DistributedTaskException + { + public DeploymentMachineGroupNotFoundException(String message) + : base(message) + { + } + + public DeploymentMachineGroupNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private DeploymentMachineGroupNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class DeploymentGroupNotFoundException : DistributedTaskException + { + public DeploymentGroupNotFoundException(String message) + : base(message) + { + } + + public DeploymentGroupNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private DeploymentGroupNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class DeploymentMachineNotFoundException : DistributedTaskException + { + public DeploymentMachineNotFoundException(String message) + : base(message) + { + } + + public DeploymentMachineNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private DeploymentMachineNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class InvalidTaskAgentVersionException : DistributedTaskException + { + public InvalidTaskAgentVersionException(String message) + : base(message) + { + } + + public InvalidTaskAgentVersionException(String message, 
Exception innerException) + : base(message, innerException) + { + } + + private InvalidTaskAgentVersionException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class TaskAgentAccessTokenExpiredException : DistributedTaskException + { + public TaskAgentAccessTokenExpiredException(String message) + : base(message) + { + } + + public TaskAgentAccessTokenExpiredException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected TaskAgentAccessTokenExpiredException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentSessionConflictException", "GitHub.DistributedTask.WebApi.TaskAgentSessionConflictException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentSessionConflictException : DistributedTaskException + { + public TaskAgentSessionConflictException(String message) + : base(message) + { + } + + public TaskAgentSessionConflictException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentSessionConflictException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class TaskAgentSessionDeletedException : DistributedTaskException + { + public TaskAgentSessionDeletedException(String message) + : base(message) + { + } + + public TaskAgentSessionDeletedException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentSessionDeletedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskAgentSessionExpiredException", "GitHub.DistributedTask.WebApi.TaskAgentSessionExpiredException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskAgentSessionExpiredException : DistributedTaskException + { + public TaskAgentSessionExpiredException(String message) + : base(message) + { + } + + public TaskAgentSessionExpiredException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskAgentSessionExpiredException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskDefinitionExistsException", "GitHub.DistributedTask.WebApi.TaskDefinitionExistsException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskDefinitionExistsException : DistributedTaskException + { + public TaskDefinitionExistsException(String message) + : base(message) + { + } + + public TaskDefinitionExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskDefinitionExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskDefinitionExistsWithHigherVersionException", "GitHub.DistributedTask.WebApi.TaskDefinitionExistsException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskDefinitionExistsWithHigherVersionException : DistributedTaskException + { + public 
TaskDefinitionExistsWithHigherVersionException(String message) + : base(message) + { + } + + public TaskDefinitionExistsWithHigherVersionException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskDefinitionExistsWithHigherVersionException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskDefinitionHostContextMismatchException", "GitHub.DistributedTask.WebApi.TaskDefinitionHostContextMismatchException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskDefinitionHostContextMismatchException : DistributedTaskException + { + public TaskDefinitionHostContextMismatchException(String message) + : base(message) + { + } + + public TaskDefinitionHostContextMismatchException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskDefinitionHostContextMismatchException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskDefinitionInputRequiredException", "GitHub.DistributedTask.WebApi.TaskDefinitionInputRequiredException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskDefinitionInputRequiredException : DistributedTaskException + { + public TaskDefinitionInputRequiredException(String message) + : base(message) + { + } + + public TaskDefinitionInputRequiredException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskDefinitionInputRequiredException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskDefinitionInvalidException", "GitHub.DistributedTask.WebApi.TaskDefinitionInvalidException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskDefinitionInvalidException : DistributedTaskException + { + public TaskDefinitionInvalidException(String message) + : base(message) + { + } + + public TaskDefinitionInvalidException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskDefinitionInvalidException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskDefinitionNotFoundException", "GitHub.DistributedTask.WebApi.TaskDefinitionNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskDefinitionNotFoundException : DistributedTaskException + { + public TaskDefinitionNotFoundException(String message) + : base(message) + { + } + + public TaskDefinitionNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskDefinitionNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskJsonNotFoundException", "GitHub.DistributedTask.WebApi.TaskJsonNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class TaskJsonNotFoundException : DistributedTaskException + { + public TaskJsonNotFoundException(String message) + : 
base(message) + { + } + + public TaskJsonNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected TaskJsonNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskOrchestrationPlanAlreadyStartedException", "GitHub.DistributedTask.WebApi.TaskOrchestrationPlanAlreadyStartedException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskOrchestrationPlanAlreadyStartedException : DistributedTaskException + { + public TaskOrchestrationPlanAlreadyStartedException(String message) + : base(message) + { + } + + public TaskOrchestrationPlanAlreadyStartedException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskOrchestrationPlanAlreadyStartedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskOrchestrationPlanCanceledException", "GitHub.DistributedTask.WebApi.TaskOrchestrationPlanCanceledException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskOrchestrationPlanCanceledException : DistributedTaskException + { + public TaskOrchestrationPlanCanceledException(String message) + : base(message) + { + } + + public TaskOrchestrationPlanCanceledException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskOrchestrationPlanCanceledException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskOrchestrationPlanNotFoundException", "GitHub.DistributedTask.WebApi.TaskOrchestrationPlanNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskOrchestrationPlanNotFoundException : DistributedTaskException + { + public TaskOrchestrationPlanNotFoundException(String message) + : base(message) + { + } + + public TaskOrchestrationPlanNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskOrchestrationPlanNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskOrchestrationPlanNotFoundException", "GitHub.DistributedTask.WebApi.TaskOrchestrationPlanNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class InvalidLicenseHubException : DistributedTaskException + { + public InvalidLicenseHubException(String message) + : base(message) + { + } + + public InvalidLicenseHubException(String message, Exception innerException) + : base(message, innerException) + { + } + + private InvalidLicenseHubException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskOrchestrationJobNotFoundException", "GitHub.DistributedTask.WebApi.TaskOrchestrationJobNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskOrchestrationJobNotFoundException : DistributedTaskException + { + public TaskOrchestrationJobNotFoundException(String message) + : base(message) + { 
+ } + + public TaskOrchestrationJobNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskOrchestrationJobNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskOrchestrationPlanSecurityException", "GitHub.DistributedTask.WebApi.TaskOrchestrationPlanSecurityException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskOrchestrationPlanSecurityException : DistributedTaskException + { + public TaskOrchestrationPlanSecurityException(String message) + : base(message) + { + } + + public TaskOrchestrationPlanSecurityException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskOrchestrationPlanSecurityException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskOrchestrationPlanTerminatedException", "GitHub.DistributedTask.WebApi.TaskOrchestrationPlanTerminatedException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskOrchestrationPlanTerminatedException : DistributedTaskException + { + public TaskOrchestrationPlanTerminatedException(String message) + : base(message) + { + } + + public TaskOrchestrationPlanTerminatedException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskOrchestrationPlanTerminatedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class TimelineExistsException : DistributedTaskException + { + public TimelineExistsException(String message) + : base(message) + { + } + + public TimelineExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TimelineExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TimelineNotFoundException", "GitHub.DistributedTask.WebApi.TimelineNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TimelineNotFoundException : DistributedTaskException + { + public TimelineNotFoundException(String message) + : base(message) + { + } + + public TimelineNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TimelineNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TimelineRecordNotFoundException", "GitHub.DistributedTask.WebApi.TimelineRecordNotFoundException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TimelineRecordNotFoundException : DistributedTaskException + { + public TimelineRecordNotFoundException(String message) + : base(message) + { + } + + public TimelineRecordNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TimelineRecordNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TimelineRecordUpdateException", 
"GitHub.DistributedTask.WebApi.TimelineRecordUpdateException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TimelineRecordUpdateException : DistributedTaskException + { + public TimelineRecordUpdateException(String message) + : base(message) + { + } + + public TimelineRecordUpdateException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TimelineRecordUpdateException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidTaskJsonException", "GitHub.DistributedTask.WebApi.InvalidTaskJsonException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class InvalidTaskJsonException : DistributedTaskException + { + public InvalidTaskJsonException(String message) + : base(message) + { + } + + public InvalidTaskJsonException(String message, Exception innerException) + : base(message, innerException) + { + } + + private InvalidTaskJsonException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidTaskDefinitionInputs", "GitHub.DistributedTask.WebApi.InvalidTaskDefinitionInputs, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class InvalidTaskDefinitionInputsException : DistributedTaskException + { + public InvalidTaskDefinitionInputsException(String message) + : base(message) + { + } + + public InvalidTaskDefinitionInputsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private InvalidTaskDefinitionInputsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidExtensionException", "GitHub.DistributedTask.WebApi.InvalidExtensionException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class InvalidExtensionException : DistributedTaskException + { + public InvalidExtensionException(String message) + : base(message) + { + } + + public InvalidExtensionException(String message, Exception innerException) + : base(message, innerException) + { + } + + private InvalidExtensionException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "ContributionDoesNotTargetBuildTask", "GitHub.DistributedTask.WebApi.ContributionDoesNotTargetBuildTask, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContributionDoesNotTargetBuildTaskException : DistributedTaskException + { + public ContributionDoesNotTargetBuildTaskException(String message) + : base(message) + { + } + + public ContributionDoesNotTargetBuildTaskException(String message, Exception innerException) + : base(message, innerException) + { + } + + private ContributionDoesNotTargetBuildTaskException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "ContributionDoesNotTargetServiceEndpointException", "GitHub.DistributedTask.WebApi.ContributionDoesNotTargetServiceEndpointException, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, 
PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContributionDoesNotTargetServiceEndpointException : DistributedTaskException + { + public ContributionDoesNotTargetServiceEndpointException(String message) + : base(message) + { + } + + public ContributionDoesNotTargetServiceEndpointException(String message, Exception innerException) + : base(message, innerException) + { + } + + private ContributionDoesNotTargetServiceEndpointException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class TaskGroupRevisionAlreadyExistsException : DistributedTaskException + { + public TaskGroupRevisionAlreadyExistsException(String message) + : base(message) + { + } + + public TaskGroupRevisionAlreadyExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskGroupRevisionAlreadyExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class TaskGroupAlreadyUpdatedException : DistributedTaskException + { + public TaskGroupAlreadyUpdatedException(String message) + : base(message) + { + } + + public TaskGroupAlreadyUpdatedException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskGroupAlreadyUpdatedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class ExtensionIsPublicAndHasPipelineDecoratorsException : DistributedTaskException + { + public ExtensionIsPublicAndHasPipelineDecoratorsException(String message) + : base(message) + { + } + + public ExtensionIsPublicAndHasPipelineDecoratorsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private ExtensionIsPublicAndHasPipelineDecoratorsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class TaskGroupUpdateFailedException : DistributedTaskException + { + public TaskGroupUpdateFailedException(String message) + : base(message) + { + } + + public TaskGroupUpdateFailedException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskGroupUpdateFailedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class TaskGroupCyclicDependencyException : DistributedTaskException + { + public TaskGroupCyclicDependencyException(String message) + : base(message) + { + } + + public TaskGroupCyclicDependencyException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskGroupCyclicDependencyException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TaskIdsDoNotMatch", "GitHub.DistributedTask.WebApi.TaskIdsDoNotMatch, GitHub.DistributedTask.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class TaskIdsDoNotMatchException : DistributedTaskException + { + public TaskIdsDoNotMatchException(String message) + : base(message) + { + } + + public TaskIdsDoNotMatchException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskIdsDoNotMatchException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class 
VariableGroupExistsException : DistributedTaskException + { + public VariableGroupExistsException(String message) + : base(message) + { + } + + public VariableGroupExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private VariableGroupExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class VariableGroupNotFoundException : DistributedTaskException + { + public VariableGroupNotFoundException(String message) + : base(message) + { + } + + public VariableGroupNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private VariableGroupNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class OAuthConfigurationExistsException : DistributedTaskException + { + public OAuthConfigurationExistsException(String message) + : base(message) + { + } + + public OAuthConfigurationExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private OAuthConfigurationExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class OAuthConfigurationNotFoundException : DistributedTaskException + { + public OAuthConfigurationNotFoundException(String message) + : base(message) + { + } + + public OAuthConfigurationNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private OAuthConfigurationNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class RegularExpressionInvalidOptionsException : DistributedTaskException + { + public RegularExpressionInvalidOptionsException(String message) + : base(message) + { + } + + public RegularExpressionInvalidOptionsException(String message, Exception ex) + : base(message, ex) + { + } + + protected RegularExpressionInvalidOptionsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class RegularExpressionValidationFailureException : DistributedTaskException + { + public RegularExpressionValidationFailureException(String message) + : base(message) + { + } + + public RegularExpressionValidationFailureException(String message, Exception ex) + : base(message, ex) + { + } + + protected RegularExpressionValidationFailureException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class SecureFileExistsException : DistributedTaskException + { + public SecureFileExistsException(String message) + : base(message) + { + } + + public SecureFileExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private SecureFileExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class SecureFileNotFoundException : DistributedTaskException + { + public SecureFileNotFoundException(String message) + : base(message) + { + } + + public SecureFileNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private SecureFileNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class 
TaskOrchestrationPlanLogNotFoundException : DistributedTaskException + { + public TaskOrchestrationPlanLogNotFoundException(String message) + : base(message) + { + } + + public TaskOrchestrationPlanLogNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskOrchestrationPlanLogNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + [Serializable] + public sealed class TaskOrchestrationPlanGroupNotFoundException : DistributedTaskException + { + public TaskOrchestrationPlanGroupNotFoundException(String message) + : base(message) + { + } + + public TaskOrchestrationPlanGroupNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private TaskOrchestrationPlanGroupNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class XPathJTokenParseException : DistributedTaskException + { + public XPathJTokenParseException(string message) + : base(message) + { + } + + public XPathJTokenParseException(string message, Exception ex) + : base(message, ex) + { + } + + protected XPathJTokenParseException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + + [Serializable] + public class InvalidDatasourceException : DistributedTaskException + { + public InvalidDatasourceException(string message) + : base(message) + { + } + + public InvalidDatasourceException(string message, Exception ex) + : base(message, ex) + { + } + + protected InvalidDatasourceException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + + [Serializable] + public class CannotDeleteAndAddMetadataException : DistributedTaskException + { + public CannotDeleteAndAddMetadataException(string message) + : base(message) + { + } + + public CannotDeleteAndAddMetadataException(string message, Exception ex) + : base(message, ex) + { + } + + protected CannotDeleteAndAddMetadataException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class InvalidTaskAgentCloudException : DistributedTaskException + { + public InvalidTaskAgentCloudException(string message) : base(message) + { + } + + public InvalidTaskAgentCloudException(string message, Exception innerException) : base(message, innerException) + { + } + + protected InvalidTaskAgentCloudException(SerializationInfo info, StreamingContext context) : base(info, context) + { + } + } + + [Serializable] + public class TaskAgentCloudExistsException : DistributedTaskException + { + public TaskAgentCloudExistsException(string message) + : base(message) + { + } + + public TaskAgentCloudExistsException(string message, Exception ex) + : base(message, ex) + { + } + + protected TaskAgentCloudExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class TaskAgentCloudNotFoundException : DistributedTaskException + { + public TaskAgentCloudNotFoundException(string message) + : base(message) + { + } + + public TaskAgentCloudNotFoundException(string message, Exception ex) + : base(message, ex) + { + } + + protected TaskAgentCloudNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class TaskAgentCloudRequestExistsException : DistributedTaskException + { + public 
TaskAgentCloudRequestExistsException(string message) + : base(message) + { + } + + public TaskAgentCloudRequestExistsException(string message, Exception ex) + : base(message, ex) + { + } + + protected TaskAgentCloudRequestExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class TaskAgentCloudRequestNotFoundException : DistributedTaskException + { + public TaskAgentCloudRequestNotFoundException(string message) + : base(message) + { + } + + public TaskAgentCloudRequestNotFoundException(string message, Exception ex) + : base(message, ex) + { + } + + protected TaskAgentCloudRequestNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class TaskAgentCloudRequestAlreadyCompleteException : DistributedTaskException + { + public TaskAgentCloudRequestAlreadyCompleteException(string message) + : base(message) + { + } + + public TaskAgentCloudRequestAlreadyCompleteException(string message, Exception ex) + : base(message, ex) + { + } + + protected TaskAgentCloudRequestAlreadyCompleteException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class TaskAgentPoolReferencesDifferentAgentCloudException : DistributedTaskException + { + public TaskAgentPoolReferencesDifferentAgentCloudException(string message) + : base(message) + { + } + + public TaskAgentPoolReferencesDifferentAgentCloudException(string message, Exception ex) + : base(message, ex) + { + } + + protected TaskAgentPoolReferencesDifferentAgentCloudException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class PrivateTaskAgentProvisioningStateInvalidException : DistributedTaskException + { + public PrivateTaskAgentProvisioningStateInvalidException(string message) + : base(message) + { + } + + public PrivateTaskAgentProvisioningStateInvalidException(string message, Exception ex) + : base(message, ex) + { + } + + protected PrivateTaskAgentProvisioningStateInvalidException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class AgentFileNotFoundException : DistributedTaskException + { + public AgentFileNotFoundException(string message) + : base(message) + { + } + + public AgentFileNotFoundException(string message, Exception ex) + : base(message, ex) + { + } + + protected AgentFileNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class AgentMediaTypeNotSupportedException : DistributedTaskException + { + public AgentMediaTypeNotSupportedException(string message) + : base(message) + { + } + + public AgentMediaTypeNotSupportedException(string message, Exception ex) + : base(message, ex) + { + } + + protected AgentMediaTypeNotSupportedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class TaskAgentCloudCommunicationException : DistributedTaskException + { + public TaskAgentCloudCommunicationException(string message) + : base(message) + { + } + + public TaskAgentCloudCommunicationException(string message, Exception ex) + : base(message, ex) + { + } + + protected TaskAgentCloudCommunicationException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class EnvironmentExistsException : 
DistributedTaskException + { + public EnvironmentExistsException(String message) + : base(message) + { + } + + public EnvironmentExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private EnvironmentExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class EnvironmentNotFoundException : DistributedTaskException + { + public EnvironmentNotFoundException(String message) + : base(message) + { + } + + public EnvironmentNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private EnvironmentNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class EnvironmentResourceExistsException : DistributedTaskException + { + public EnvironmentResourceExistsException(String message) + : base(message) + { + } + + public EnvironmentResourceExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + + private EnvironmentResourceExistsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class EnvironmentResourceNotFoundException : DistributedTaskException + { + public EnvironmentResourceNotFoundException(String message) + : base(message) + { + } + + public EnvironmentResourceNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private EnvironmentResourceNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class EnvironmentResourcesExceededMaxCountException : DistributedTaskException + { + public EnvironmentResourcesExceededMaxCountException(String message) + : base(message) + { + } + + public EnvironmentResourcesExceededMaxCountException(String message, Exception innerException) + : base(message, innerException) + { + } + + private EnvironmentResourcesExceededMaxCountException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class EnvironmentExecutionDeploymentHistoryRecordNotFoundException : DistributedTaskException + { + public EnvironmentExecutionDeploymentHistoryRecordNotFoundException(String message) + : base(message) + { + } + + public EnvironmentExecutionDeploymentHistoryRecordNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + private EnvironmentExecutionDeploymentHistoryRecordNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class EnvironmentPoolAlreadyInUseException : DistributedTaskException + { + public EnvironmentPoolAlreadyInUseException(String message) + : base(message) + { + } + + public EnvironmentPoolAlreadyInUseException(String message, Exception innerException) + : base(message, innerException) + { + } + + private EnvironmentPoolAlreadyInUseException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public sealed class InvalidContinuationTokenException : DistributedTaskException + { + public InvalidContinuationTokenException(String message) + : base(message) + { + } + + public InvalidContinuationTokenException(String message, Exception innerException) + : base(message, innerException) + { + } + + 
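Every exception type declared above follows the same serializable shape: a public (String) constructor, a public (String, Exception) constructor, and a private or protected serialization constructor, all delegating to DistributedTaskException. The sketch below shows how calling code might distinguish a recoverable session expiry from a missing pool; it is illustrative only and not part of this patch, and the renewSession delegate and trace callback are placeholders rather than real runner APIs.

using System;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;

internal static class SessionErrorHandling
{
    // Illustrative sketch: returns true when renewal succeeded,
    // false when the caller should create a fresh session instead.
    public static async Task<bool> TryRenewAsync(Func<Task> renewSession, Action<string> trace)
    {
        try
        {
            await renewSession();
            return true;
        }
        catch (TaskAgentSessionExpiredException)
        {
            // The server discarded the session; retrying the same renewal will not help.
            trace("Session expired; a new session must be created.");
            return false;
        }
        catch (TaskAgentPoolNotFoundException ex)
        {
            // The pool itself is gone, which is not recoverable here, so let it propagate.
            trace(ex.Message);
            throw;
        }
    }
}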
private InvalidContinuationTokenException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ExpressionValidationItem.cs b/src/Sdk/DTWebApi/WebApi/ExpressionValidationItem.cs new file mode 100644 index 00000000000..0a294e23bb9 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ExpressionValidationItem.cs @@ -0,0 +1,13 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class ExpressionValidationItem : ValidationItem + { + public ExpressionValidationItem() + : base(InputValidationTypes.Expression) + { + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/IOrchestrationProcess.cs b/src/Sdk/DTWebApi/WebApi/IOrchestrationProcess.cs new file mode 100644 index 00000000000..3fd813dc09c --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/IOrchestrationProcess.cs @@ -0,0 +1,172 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Reflection; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines; +using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.WebApi +{ + [ClientIgnore] + [DataContract] + [EditorBrowsable(EditorBrowsableState.Never)] + public enum OrchestrationProcessType + { + [DataMember] + Container = 1, + + [DataMember] + Pipeline = 2, + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [JsonConverter(typeof(OrchestrationEnvironmentJsonConverter))] + public interface IOrchestrationEnvironment + { + OrchestrationProcessType ProcessType { get; } + + IDictionary Variables { get; } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [JsonConverter(typeof(OrchestrationProcessJsonConverter))] + public interface IOrchestrationProcess + { + OrchestrationProcessType ProcessType { get; } + } + + internal sealed class OrchestrationProcessJsonConverter : VssSecureJsonConverter + { + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Boolean CanConvert(Type objectType) + { + return typeof(IOrchestrationProcess).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + JObject value = JObject.Load(reader); + IOrchestrationProcess process = null; + if (value.TryGetValue("stages", StringComparison.OrdinalIgnoreCase, out _) || + value.TryGetValue("phases", StringComparison.OrdinalIgnoreCase, out _)) + { + process = new PipelineProcess(); + } + else if (value.TryGetValue("children", StringComparison.OrdinalIgnoreCase, out _)) + { + process = new TaskOrchestrationContainer(); + } + + if (process != null) + { + using (var objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, process); + } + } + + return process; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } + + internal sealed class OrchestrationEnvironmentJsonConverter : VssSecureJsonConverter + { + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Boolean CanConvert(Type objectType) + { + return typeof(IOrchestrationEnvironment).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, 
+ Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + JToken propertyValue; + JObject value = JObject.Load(reader); + IOrchestrationEnvironment environment = null; + OrchestrationProcessType processType = OrchestrationProcessType.Container; + if (value.TryGetValue("ProcessType", StringComparison.OrdinalIgnoreCase, out propertyValue)) + { + if (propertyValue.Type == JTokenType.Integer) + { + processType = (OrchestrationProcessType)(Int32)propertyValue; + } + else if (propertyValue.Type != JTokenType.String || !Enum.TryParse((String)propertyValue, true, out processType)) + { + return null; + } + } + + switch (processType) + { + case OrchestrationProcessType.Container: + environment = new PlanEnvironment(); + break; + + case OrchestrationProcessType.Pipeline: + environment = new PipelineEnvironment(); + break; + } + + if (environment != null) + { + using (var objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, environment); + } + } + + return environment; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ITaskDefinitionReference.cs b/src/Sdk/DTWebApi/WebApi/ITaskDefinitionReference.cs new file mode 100644 index 00000000000..4a70dffe29d --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ITaskDefinitionReference.cs @@ -0,0 +1,16 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; + +namespace GitHub.DistributedTask.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public interface ITaskDefinitionReference + { + Guid Id { get; } + + String Name { get; } + + String Version { get; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/IdentityRefExtensions.cs b/src/Sdk/DTWebApi/WebApi/IdentityRefExtensions.cs new file mode 100644 index 00000000000..9813c0905f1 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/IdentityRefExtensions.cs @@ -0,0 +1,27 @@ +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + internal static class IdentityRefExtensions + { + public static IdentityRef Clone(this IdentityRef source) + { + if (source == null) + { + return null; + } + + return new IdentityRef + { + DisplayName = source.DisplayName, + Id = source.Id, + ImageUrl = source.ImageUrl, + IsAadIdentity = source.IsAadIdentity, + IsContainer = source.IsContainer, + ProfileUrl = source.ProfileUrl, + UniqueName = source.UniqueName, + Url = source.Url, + }; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/InputBindingContext.cs b/src/Sdk/DTWebApi/WebApi/InputBindingContext.cs new file mode 100644 index 00000000000..694b6cdf763 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/InputBindingContext.cs @@ -0,0 +1,19 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class InputBindingContext + { + /// + /// Value of the input + /// + [DataMember(EmitDefaultValue = false)] + public String Value + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/InputValidationItem.cs b/src/Sdk/DTWebApi/WebApi/InputValidationItem.cs new file mode 100644 index 00000000000..acb5a72bf39 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/InputValidationItem.cs @@ -0,0 +1,23 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class InputValidationItem : ValidationItem + { + public InputValidationItem() + 
: base(InputValidationTypes.Input) + { + } + + /// + /// Provides binding context for the expression to evaluate + /// + [DataMember(EmitDefaultValue = false)] + public InputBindingContext Context + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Issue.cs b/src/Sdk/DTWebApi/WebApi/Issue.cs new file mode 100644 index 00000000000..4875ca5a1d6 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Issue.cs @@ -0,0 +1,91 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class Issue + { + + public Issue() + { + } + + private Issue(Issue issueToBeCloned) + { + this.Type = issueToBeCloned.Type; + this.Category = issueToBeCloned.Category; + this.Message = issueToBeCloned.Message; + + if (issueToBeCloned.m_data != null) + { + foreach (var item in issueToBeCloned.m_data) + { + this.Data.Add(item); + } + } + } + + [DataMember(Order = 1)] + public IssueType Type + { + get; + set; + } + + [DataMember(Order = 2)] + public String Category + { + get; + set; + } + + [DataMember(Order = 3)] + public String Message + { + get; + set; + } + + public IDictionary Data + { + get + { + if (m_data == null) + { + m_data = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_data; + } + } + + public Issue Clone() + { + return new Issue(this); + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedData, ref m_data, StringComparer.OrdinalIgnoreCase, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_data, ref m_serializedData, StringComparer.OrdinalIgnoreCase); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedData = null; + } + + [DataMember(Name = "Data", EmitDefaultValue = false, Order = 4)] + private IDictionary m_serializedData; + + private IDictionary m_data; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/IssueType.cs b/src/Sdk/DTWebApi/WebApi/IssueType.cs new file mode 100644 index 00000000000..8b8e52d1e88 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/IssueType.cs @@ -0,0 +1,14 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public enum IssueType + { + [EnumMember] + Error = 1, + + [EnumMember] + Warning = 2 + } +} diff --git a/src/Sdk/DTWebApi/WebApi/JobCancelMessage.cs b/src/Sdk/DTWebApi/WebApi/JobCancelMessage.cs new file mode 100644 index 00000000000..e7b0c78cc79 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/JobCancelMessage.cs @@ -0,0 +1,47 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class JobCancelMessage + { + public static readonly String MessageType = "JobCancellation"; + + [JsonConstructor] + internal JobCancelMessage() + { + } + + public JobCancelMessage(Guid jobId, TimeSpan timeout) + { + this.JobId = jobId; + this.Timeout = timeout; + } + + [DataMember] + public Guid JobId + { + get; + private set; + } + + [DataMember] + public TimeSpan Timeout + { + get; + private set; + } + + public TaskAgentMessage GetAgentMessage() + { + return new TaskAgentMessage + { + Body = JsonUtility.ToString(this), + MessageType = JobCancelMessage.MessageType, + }; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/JobEnvironment.cs b/src/Sdk/DTWebApi/WebApi/JobEnvironment.cs new file mode 100644 index 
00000000000..a0ba24ffb7d --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/JobEnvironment.cs @@ -0,0 +1,294 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Represents the context of variables and vectors for a job request. + /// + [DataContract] + public sealed class JobEnvironment : ICloneable + { + /// + /// Initializes a new JobEnvironment with empty collections of repositories, vectors, + /// and variables. + /// + public JobEnvironment() + { + } + + public JobEnvironment( + IDictionary variables, + List maskhints, + JobResources resources) + { + if (resources!= null) + { + this.Endpoints.AddRange(resources.Endpoints.Where(x => !String.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase))); + this.SystemConnection = resources.Endpoints.FirstOrDefault(x => String.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase)); + this.SecureFiles.AddRange(resources.SecureFiles); + } + + if (maskhints != null) + { + this.MaskHints.AddRange(maskhints); + } + + if (variables != null) + { + foreach (var variable in variables) + { + this.Variables[variable.Key] = variable.Value?.Value; + + if (variable.Value?.IsSecret == true) + { + // Make sure we propagate secret variables into the mask hints + this.MaskHints.Add(new MaskHint { Type = MaskType.Variable, Value = variable.Key }); + } + } + } + } + + public void Extract( + Dictionary variables, + HashSet maskhints, + JobResources jobResources) + { + // construct variables + HashSet secretVariables = new HashSet(this.MaskHints.Where(t => t.Type == MaskType.Variable).Select(v => v.Value), StringComparer.OrdinalIgnoreCase); + foreach (var variable in this.Variables) + { + variables[variable.Key] = new VariableValue(variable.Value, secretVariables.Contains(variable.Key)); + } + + // construct maskhints + maskhints.AddRange(this.MaskHints.Where(x => !(x.Type == MaskType.Variable && secretVariables.Contains(x.Value))).Select(x => x.Clone())); + + // constuct job resources (endpoints, securefiles and systemconnection) + jobResources.SecureFiles.AddRange(this.SecureFiles.Select(x => x.Clone())); + jobResources.Endpoints.AddRange(this.Endpoints.Select(x => x.Clone())); + + if (this.SystemConnection != null) + { + jobResources.Endpoints.Add(this.SystemConnection.Clone()); + } + } + + public JobEnvironment(PlanEnvironment environment) + { + ArgumentUtility.CheckForNull(environment, nameof(environment)); + + if (environment.MaskHints.Count > 0) + { + m_maskHints = new List(environment.MaskHints.Select(x => x.Clone())); + } + + if (environment.Options.Count > 0) + { + m_options = environment.Options.ToDictionary(x => x.Key, x => x.Value.Clone()); + } + + if (environment.Variables.Count > 0) + { + m_variables = new Dictionary(environment.Variables, StringComparer.OrdinalIgnoreCase); + } + } + + private JobEnvironment(JobEnvironment environmentToClone) + { + if (environmentToClone.SystemConnection != null) + { + this.SystemConnection = environmentToClone.SystemConnection.Clone(); + } + + if (environmentToClone.m_maskHints != null) + { + m_maskHints = environmentToClone.m_maskHints.Select(x => x.Clone()).ToList(); + } + + if (environmentToClone.m_endpoints != null) + { + m_endpoints = environmentToClone.m_endpoints.Select(x => x.Clone()).ToList(); + } + + if 
(environmentToClone.m_secureFiles != null) + { + m_secureFiles = environmentToClone.m_secureFiles.Select(x => x.Clone()).ToList(); + } + + if (environmentToClone.m_options != null) + { + m_options = environmentToClone.m_options.ToDictionary(x => x.Key, x => x.Value.Clone()); + } + + if (environmentToClone.m_variables != null) + { + m_variables = new Dictionary(environmentToClone.m_variables, StringComparer.OrdinalIgnoreCase); + } + } + + /// + /// Gets or sets the endpoint used for communicating back to the calling service. + /// + [DataMember(EmitDefaultValue = false)] + public ServiceEndpoint SystemConnection + { + get; + set; + } + + /// + /// Gets the collection of mask hints + /// + public List MaskHints + { + get + { + if (m_maskHints == null) + { + m_maskHints = new List(); + } + return m_maskHints; + } + } + + /// + /// Gets the collection of endpoints associated with the current context. + /// + public List Endpoints + { + get + { + if (m_endpoints == null) + { + m_endpoints = new List(); + } + return m_endpoints; + } + } + + /// + /// Gets the collection of secure files associated with the current context + /// + public List SecureFiles + { + get + { + if (m_secureFiles == null) + { + m_secureFiles = new List(); + } + return m_secureFiles; + } + } + + /// + /// Gets the collection of options associated with the current context. (Deprecated, use by 1.x agent) + /// + public IDictionary Options + { + get + { + if (m_options == null) + { + m_options = new Dictionary(); + } + return m_options; + } + } + + /// + /// Gets the collection of variables associated with the current context. + /// + public IDictionary Variables + { + get + { + if (m_variables == null) + { + m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_variables; + } + } + + Object ICloneable.Clone() + { + return this.Clone(); + } + + /// + /// Creates a deep copy of the job environment. 
+ /// + /// A deep copy of the job environment + public JobEnvironment Clone() + { + return new JobEnvironment(this); + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + if (m_serializedMaskHints != null && m_serializedMaskHints.Count > 0) + { + m_maskHints = new List(m_serializedMaskHints.Distinct()); + } + + m_serializedMaskHints = null; + + SerializationHelper.Copy(ref m_serializedVariables, ref m_variables, true); + SerializationHelper.Copy(ref m_serializedEndpoints, ref m_endpoints, true); + SerializationHelper.Copy(ref m_serializedSecureFiles, ref m_secureFiles, true); + SerializationHelper.Copy(ref m_serializedOptions, ref m_options, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (this.m_maskHints != null && this.m_maskHints.Count > 0) + { + m_serializedMaskHints = new List(this.m_maskHints.Distinct()); + } + + SerializationHelper.Copy(ref m_variables, ref m_serializedVariables); + SerializationHelper.Copy(ref m_endpoints, ref m_serializedEndpoints); + SerializationHelper.Copy(ref m_secureFiles, ref m_serializedSecureFiles); + SerializationHelper.Copy(ref m_options, ref m_serializedOptions); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedMaskHints = null; + m_serializedVariables = null; + m_serializedEndpoints = null; + m_serializedSecureFiles = null; + m_serializedOptions = null; + } + + private List m_maskHints; + private List m_endpoints; + private List m_secureFiles; + private IDictionary m_options; + private IDictionary m_variables; + + [DataMember(Name = "Endpoints", EmitDefaultValue = false)] + private List m_serializedEndpoints; + + [DataMember(Name = "SecureFiles", EmitDefaultValue = false)] + private List m_serializedSecureFiles; + + [DataMember(Name = "Options", EmitDefaultValue = false)] + private IDictionary m_serializedOptions; + + [DataMember(Name = "Mask", EmitDefaultValue = false)] + private List m_serializedMaskHints; + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private IDictionary m_serializedVariables; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/JobEvent.cs b/src/Sdk/DTWebApi/WebApi/JobEvent.cs new file mode 100644 index 00000000000..b17e44493cd --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/JobEvent.cs @@ -0,0 +1,355 @@ +using System; +using System.Collections.Generic; +using System.Reflection; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Internal; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.WebApi +{ + public static class JobEventTypes + { + public const String JobAssigned = "JobAssigned"; + + public const String JobCompleted = "JobCompleted"; + + public const String JobStarted = "JobStarted"; + + public const String TaskAssigned = "TaskAssigned"; + + public const String TaskStarted = "TaskStarted"; + + public const String TaskCompleted = "TaskCompleted"; + } + + [DataContract] + [KnownType(typeof(JobAssignedEvent))] + [KnownType(typeof(JobCompletedEvent))] + [KnownType(typeof(JobStartedEvent))] + [KnownType(typeof(TaskAssignedEvent))] + [KnownType(typeof(TaskStartedEvent))] + [KnownType(typeof(TaskCompletedEvent))] + [KnownType(typeof(TaskLocalExecutionCompletedEvent))] + [JsonConverter(typeof(JobEventJsonConverter))] + public abstract class JobEvent + { + protected JobEvent(String name) + { + this.Name = name; + } + + protected JobEvent( + String name, + Guid jobId) + { + this.Name = name; + this.JobId = jobId; + } + 
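JobEnvironment folds secret variables into MaskHints when it is constructed, and Extract reverses that mapping by rebuilding secret-flagged VariableValue entries from the Variable-type mask hints. A minimal round trip is sketched below; it is not part of the patch, and the generic type arguments, the parameterless JobResources constructor, and the two-argument VariableValue constructor are inferred from how those members are used in this file rather than stated by it.

using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.DistributedTask.WebApi;

internal static class JobEnvironmentRoundTrip
{
    public static void Demo()
    {
        // Variables are keyed by name; VariableValue(value, isSecret) mirrors the call made inside Extract.
        var variables = new Dictionary<String, VariableValue>(StringComparer.OrdinalIgnoreCase)
        {
            { "system.debug", new VariableValue("true", false) },
            { "my.token", new VariableValue("s3cr3t", true) },
        };

        var environment = new JobEnvironment(variables, maskhints: null, resources: null);

        // The secret variable is now also represented as a Variable-type mask hint.
        bool masked = environment.MaskHints.Any(h => h.Type == MaskType.Variable && h.Value == "my.token");

        // Extract rebuilds VariableValue entries, restoring the secret flag from the mask hints.
        var rebuilt = new Dictionary<String, VariableValue>(StringComparer.OrdinalIgnoreCase);
        environment.Extract(rebuilt, new HashSet<MaskHint>(), new JobResources());

        Console.WriteLine($"masked: {masked}, still secret after Extract: {rebuilt["my.token"].IsSecret}");
    }
}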
+ [DataMember] + public String Name + { + get; + private set; + } + + [DataMember(EmitDefaultValue = false)] + public Guid JobId + { + get; + set; + } + } + + [DataContract] + public sealed class JobAssignedEvent : JobEvent + { + internal JobAssignedEvent() + : base(JobEventTypes.JobAssigned) + { + } + + public JobAssignedEvent(Guid jobId) + : base(JobEventTypes.JobAssigned, jobId) + { + } + + public JobAssignedEvent( + Guid jobId, + TaskAgentJobRequest request) + : base(JobEventTypes.JobAssigned, jobId) + { + this.Request = request; + } + + [DataMember] + public TaskAgentJobRequest Request + { + get; + set; + } + } + + [DataContract] + public sealed class JobStartedEvent : JobEvent + { + internal JobStartedEvent() + : base(JobEventTypes.JobStarted) + { + } + + public JobStartedEvent(Guid jobId) + : base(JobEventTypes.JobStarted, jobId) + { + } + } + + [DataContract] + public sealed class JobCompletedEvent : JobEvent + { + internal JobCompletedEvent() + : base(JobEventTypes.JobCompleted) + { + } + + public JobCompletedEvent( + Guid jobId, + TaskResult result) + : this(0, jobId, result) + { + } + + public JobCompletedEvent( + Int64 requestId, + Guid jobId, + TaskResult result) + : this(requestId, jobId, result, null) + { + } + + public JobCompletedEvent( + Int64 requestId, + Guid jobId, + TaskResult result, + IDictionary outputVariables) + : base(JobEventTypes.JobCompleted, jobId) + { + this.RequestId = requestId; + this.Result = result; + } + + [DataMember(EmitDefaultValue = false)] + public Int64 RequestId + { + get; + set; + } + + [DataMember] + public TaskResult Result + { + get; + set; + } + } + + [DataContract] + public abstract class TaskEvent : JobEvent + { + protected TaskEvent(string name) : base(name) + { + } + + protected TaskEvent( + string name, + Guid jobId, + Guid taskId) + : base(name, jobId) + { + TaskId = taskId; + } + + [DataMember(EmitDefaultValue = false)] + public Guid TaskId + { + get; + set; + } + } + + [DataContract] + public sealed class TaskAssignedEvent : TaskEvent + { + public TaskAssignedEvent() + : base(JobEventTypes.TaskAssigned) + { + } + + public TaskAssignedEvent( + Guid jobId, + Guid taskId) + : base(JobEventTypes.TaskAssigned, jobId, taskId) + { + } + } + + [DataContract] + public sealed class TaskStartedEvent : TaskEvent + { + public TaskStartedEvent() + : base(JobEventTypes.TaskStarted) + { + } + + public TaskStartedEvent( + Guid jobId, + Guid taskId) + : base(JobEventTypes.TaskStarted, jobId, taskId) + { + } + } + + [DataContract] + public sealed class TaskCompletedEvent : TaskEvent + { + public TaskCompletedEvent() + : base(JobEventTypes.TaskCompleted) + { + } + + public TaskCompletedEvent( + Guid jobId, + Guid taskId, + TaskResult taskResult) + : base(JobEventTypes.TaskCompleted, jobId, taskId) + { + Result = taskResult; + } + + [DataMember] + public TaskResult Result + { + get; + set; + } + } + + [DataContract] + [ClientIgnore] + internal sealed class TaskLocalExecutionCompletedEvent : TaskEvent + { + public TaskLocalExecutionCompletedEvent() + : base(JobEventTypes.TaskCompleted) + { + } + + public TaskLocalExecutionCompletedEvent( + Guid jobId, + Guid taskId, + ServerTaskSectionExecutionOutput data) + : base(JobEventTypes.TaskCompleted, jobId, taskId) + { + EventData = data; + } + + [DataMember] + public ServerTaskSectionExecutionOutput EventData + { + get; + set; + } + } + + internal sealed class JobEventJsonConverter : VssSecureJsonConverter + { + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Boolean 
CanConvert(Type objectType) + { + return typeof(JobEvent).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + var eventObject = JObject.Load(reader); + + JToken propertyValue; + JobEvent jobEvent = null; + if (eventObject.TryGetValue("Name", StringComparison.OrdinalIgnoreCase, out propertyValue)) + { + if (propertyValue.Type == JTokenType.String) + { + String nameValue = (String)propertyValue; + if (String.Equals(nameValue, JobEventTypes.JobAssigned, StringComparison.Ordinal)) + { + jobEvent = new JobAssignedEvent(); + } + else if (String.Equals(nameValue, JobEventTypes.JobCompleted, StringComparison.Ordinal)) + { + jobEvent = new JobCompletedEvent(); + } + else if (String.Equals(nameValue, JobEventTypes.JobStarted, StringComparison.Ordinal)) + { + jobEvent = new JobStartedEvent(); + } + else if (String.Equals(nameValue, JobEventTypes.TaskAssigned, StringComparison.Ordinal)) + { + jobEvent = new TaskAssignedEvent(); + } + else if (String.Equals(nameValue, JobEventTypes.TaskStarted, StringComparison.Ordinal)) + { + jobEvent = new TaskStartedEvent(); + } + else if (String.Equals(nameValue, JobEventTypes.TaskCompleted, StringComparison.Ordinal)) + { + jobEvent = new TaskCompletedEvent(); + } + } + } + + if (jobEvent == null) + { + if (eventObject.TryGetValue("Request", StringComparison.OrdinalIgnoreCase, out propertyValue)) + { + jobEvent = new JobAssignedEvent(); + } + else if (eventObject.TryGetValue("Result", StringComparison.OrdinalIgnoreCase, out propertyValue)) + { + jobEvent = new JobCompletedEvent(); + } + } + + if (jobEvent == null) + { + return existingValue; + } + + using (var objectReader = eventObject.CreateReader()) + { + serializer.Populate(objectReader, jobEvent); + } + + return jobEvent; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/JobExecutionModeTypes.cs b/src/Sdk/DTWebApi/WebApi/JobExecutionModeTypes.cs new file mode 100644 index 00000000000..911da471acd --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/JobExecutionModeTypes.cs @@ -0,0 +1,12 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.WebApi +{ + public class JobExecutionModeTypes + { + public const string Server = "Server"; + + public const string Agent = "Agent"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/JobOption.cs b/src/Sdk/DTWebApi/WebApi/JobOption.cs new file mode 100644 index 00000000000..36c7b4276ba --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/JobOption.cs @@ -0,0 +1,87 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Represents an option that may affect the way an agent runs the job. + /// + [DataContract] + public class JobOption : ICloneable + { + /// + /// Initializes a new JobOption with an empty collection of data. + /// + public JobOption() + { + } + + private JobOption(JobOption optionToClone) + { + this.Id = optionToClone.Id; + + if (optionToClone.m_data != null) + { + m_data = new Dictionary(optionToClone.m_data, StringComparer.OrdinalIgnoreCase); + } + } + + /// + /// Gets the id of the option. + /// + [DataMember] + public Guid Id { get; set; } + + /// + /// Gets the data associated with the option. 
+ /// + public IDictionary Data + { + get + { + if (m_data == null) + { + m_data = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_data; + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + if (m_serializedData != null && m_serializedData.Count > 0) + { + m_data = new Dictionary(m_serializedData, StringComparer.OrdinalIgnoreCase); + } + + m_serializedData = null; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + m_serializedData = this.Data.Count > 0 ? this.Data : null; + } + + Object ICloneable.Clone() + { + return this.Clone(); + } + + /// + /// Creates a deep copy of the job option. + /// + /// A deep copy of the job option + public JobOption Clone() + { + return new JobOption(this); + } + + private Dictionary m_data; + + [DataMember(Name = "Data", EmitDefaultValue = false)] + private IDictionary m_serializedData; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/JobRequestMessage.cs b/src/Sdk/DTWebApi/WebApi/JobRequestMessage.cs new file mode 100644 index 00000000000..4f0098178a2 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/JobRequestMessage.cs @@ -0,0 +1,86 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + [JsonConverter(typeof(JobRequestMessageJsonConverter))] + + public abstract class JobRequestMessage + { + protected JobRequestMessage(string messageType) + { + this.MessageType = messageType; + } + + protected JobRequestMessage( + string messageType, + TaskOrchestrationPlanReference plan, + TimelineReference timeline, + Guid jobId, + String jobName, + String jobRefName, + JobEnvironment environment) + { + this.MessageType = messageType; + this.Plan = plan; + this.JobId = jobId; + this.JobName = jobName; + this.JobRefName = jobRefName; + this.Timeline = timeline; + this.Environment = environment; + } + + [DataMember] + public String MessageType + { + get; + private set; + } + + [DataMember] + public TaskOrchestrationPlanReference Plan + { + get; + private set; + } + + [DataMember] + public TimelineReference Timeline + { + get; + private set; + } + + [DataMember] + public Guid JobId + { + get; + private set; + } + + [DataMember] + public String JobName + { + get; + private set; + } + + [DataMember] + public String JobRefName + { + get; + private set; + } + + [DataMember] + public JobEnvironment Environment + { + get; + private set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/JobRequestMessageJsonConverter.cs b/src/Sdk/DTWebApi/WebApi/JobRequestMessageJsonConverter.cs new file mode 100644 index 00000000000..d1810d7b59c --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/JobRequestMessageJsonConverter.cs @@ -0,0 +1,89 @@ +using System; +using System.Reflection; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.WebApi +{ + internal sealed class JobRequestMessageJsonConverter : VssSecureJsonConverter + { + public override Boolean CanConvert(Type objectType) + { + return typeof(JobRequestMessage).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + Object newValue = null; + JToken 
propertyValue; + JObject value = JObject.Load(reader); + + if (value.TryGetValue("MessageType", StringComparison.OrdinalIgnoreCase, out propertyValue)) + { + if (propertyValue.Type == JTokenType.String) + { + var messageType = (String)propertyValue; + + switch (messageType) + { + case JobRequestMessageTypes.AgentJobRequest: + newValue = new AgentJobRequestMessage(); + break; + + case JobRequestMessageTypes.ServerTaskRequest: + case JobRequestMessageTypes.ServerJobRequest: + newValue = new ServerTaskRequestMessage(); + break; + } + } + } + + if (newValue == null) + { + if (value.TryGetValue("RequestId", StringComparison.OrdinalIgnoreCase, out propertyValue)) + { + newValue = new AgentJobRequestMessage(); + } + } + + if (newValue == null) + { + return existingValue; + } + + using (JsonReader objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, newValue); + } + + return newValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + // The virtual method returns false for CanWrite so this should never be invoked + throw new NotSupportedException(); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/JobRequestMessageTypes.cs b/src/Sdk/DTWebApi/WebApi/JobRequestMessageTypes.cs new file mode 100644 index 00000000000..2edc57e4c52 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/JobRequestMessageTypes.cs @@ -0,0 +1,15 @@ +using System; + +namespace GitHub.DistributedTask.WebApi +{ + public static class JobRequestMessageTypes + { + public const String AgentJobRequest = "JobRequest"; + + public const String ServerJobRequest = "ServerJobRequest"; + + public const String ServerTaskRequest = "ServerTaskRequest"; + + public const String PipelineAgentJobRequest = "PipelineAgentJobRequest"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/MachineGroupActionFilter.cs b/src/Sdk/DTWebApi/WebApi/MachineGroupActionFilter.cs new file mode 100644 index 00000000000..733c02e84ed --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/MachineGroupActionFilter.cs @@ -0,0 +1,19 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [Flags] + [DataContract] + public enum MachineGroupActionFilter + { + [EnumMember] + None = 0, + + [EnumMember] + Manage = 2, + + [EnumMember] + Use = 16, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/MarketplacePurchasedLicense.cs b/src/Sdk/DTWebApi/WebApi/MarketplacePurchasedLicense.cs new file mode 100644 index 00000000000..093ecd718da --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/MarketplacePurchasedLicense.cs @@ -0,0 +1,35 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Represents a purchase of resource units in a secondary marketplace. + /// + /// + /// The type of resource purchased (pipelines, minutes) is not represented here. + /// + [DataContract] + public sealed class MarketplacePurchasedLicense + { + /// + /// The Marketplace display name. + /// + /// "GitHub" + [DataMember(EmitDefaultValue = false)] + public String MarketplaceName { get; set; } + + /// + /// The name of the identity making the purchase as seen by the marketplace + /// + /// "AppPreview, Microsoft, etc." + [DataMember(EmitDefaultValue = false)] + public String PurchaserName { get; set; } + + /// + /// The quantity purchased. 
+ /// + [DataMember(EmitDefaultValue = false)] + public Int32 PurchaseUnitCount { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/MaskHint.cs b/src/Sdk/DTWebApi/WebApi/MaskHint.cs new file mode 100644 index 00000000000..3b5148f742d --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/MaskHint.cs @@ -0,0 +1,54 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class MaskHint + { + public MaskHint() + { + } + + private MaskHint(MaskHint maskHintToBeCloned) + { + this.Type = maskHintToBeCloned.Type; + this.Value = maskHintToBeCloned.Value; + } + + public MaskHint Clone() + { + return new MaskHint(this); + } + + [DataMember] + public MaskType Type + { + get; + set; + } + + [DataMember] + public String Value + { + get; + set; + } + + public override Boolean Equals(Object obj) + { + var otherHint = obj as MaskHint; + if (otherHint != null) + { + return this.Type == otherHint.Type && String.Equals(this.Value ?? String.Empty, otherHint.Value ?? String.Empty, StringComparison.Ordinal); + } + + return false; + } + + public override Int32 GetHashCode() + { + return this.Type.GetHashCode() ^ (this.Value ?? String.Empty).GetHashCode(); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/MaskType.cs b/src/Sdk/DTWebApi/WebApi/MaskType.cs new file mode 100644 index 00000000000..b21d7bdea4e --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/MaskType.cs @@ -0,0 +1,14 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public enum MaskType + { + [EnumMember] + Variable = 1, + + [EnumMember] + Regex = 2 + } +} diff --git a/src/Sdk/DTWebApi/WebApi/MetricsColumn.cs b/src/Sdk/DTWebApi/WebApi/MetricsColumn.cs new file mode 100644 index 00000000000..2ba192d7314 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/MetricsColumn.cs @@ -0,0 +1,100 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Metrics columns header + /// + [DataContract] + public sealed class MetricsColumnsHeader + { + /// + /// Properties of deployment group for which metrics are provided. + /// E.g. 1: LastJobStatus + /// E.g. 2: TargetState + /// + public IList Dimensions + { + get + { + if (m_dimensions == null) + { + m_dimensions = new List(); + } + + return m_dimensions; + } + internal set + { + m_dimensions = value; + } + } + + /// + /// The types of metrics. + /// E.g. 1: total count of deployment targets. + /// E.g. 2: Average time of deployment to the deployment targets. + /// + public IList Metrics + { + get + { + if (m_metrics == null) + { + m_metrics = new List(); + } + + return m_metrics; + } + internal set + { + m_metrics = value; + } + } + + /// + /// Properties of deployment group for which metrics are provided. + /// E.g. 1: LastJobStatus + /// E.g. 2: TargetState + /// + [DataMember(Name = "Dimensions")] + private IList m_dimensions; + + /// + /// The types of metrics. + /// E.g. 1: total count of deployment targets. + /// E.g. 2: Average time of deployment to the deployment targets. + /// + [DataMember(Name = "Metrics")] + private IList m_metrics; + } + + /// + /// Meta data for a metrics column. + /// + [DataContract] + public sealed class MetricsColumnMetaData + { + /// + /// Name. + /// + [DataMember] + public String ColumnName + { + get; + internal set; + } + + /// + /// Data type. 
+ /// + [DataMember] + public String ColumnValueType + { + get; + internal set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/MetricsRow.cs b/src/Sdk/DTWebApi/WebApi/MetricsRow.cs new file mode 100644 index 00000000000..5ae35278234 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/MetricsRow.cs @@ -0,0 +1,71 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Metrics row. + /// + [DataContract] + public sealed class MetricsRow + { + /// + /// The values of the properties mentioned as 'Dimensions' in column header. + /// E.g. 1: For a property 'LastJobStatus' - metrics will be provided for 'passed', 'failed', etc. + /// E.g. 2: For a property 'TargetState' - metrics will be provided for 'online', 'offline' targets. + /// + public IList Dimensions + { + get + { + if (m_dimensions == null) + { + m_dimensions = new List(); + } + + return m_dimensions; + } + internal set + { + m_dimensions = value; + } + } + + /// + /// Metrics in serialized format. + /// Should be deserialized based on the data type provided in header. + /// + public IList Metrics + { + get + { + if (m_metrics == null) + { + m_metrics = new List(); + } + + return m_metrics; + } + internal set + { + m_metrics = value; + } + } + + /// + /// The values of the properties mentioned as 'Dimensions' in column header. + /// E.g. 1: For a property 'LastJobStatus' - metrics will be provided for 'passed', 'failed', etc. + /// E.g. 2: For a property 'TargetState' - metrics will be provided for 'online', 'offline' targets. + /// + [DataMember(Name = "Dimensions")] + private IList m_dimensions; + + /// + /// Metrics in serialized format. + /// Should be deserialized based on the data type provided in header. + /// + [DataMember(Name = "Metrics")] + private IList m_metrics; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/PackageMetadata.cs b/src/Sdk/DTWebApi/WebApi/PackageMetadata.cs new file mode 100644 index 00000000000..9840ea1d4b9 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/PackageMetadata.cs @@ -0,0 +1,92 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Represents a downloadable package. + /// + [DataContract] + public class PackageMetadata + { + /// + /// The type of package (e.g. "agent") + /// + [DataMember] + public String Type + { + get; + set; + } + + /// + /// The platform (win7, linux, etc.) + /// + [DataMember] + public String Platform + { + get; + set; + } + + /// + /// The date the package was created + /// + [DataMember(EmitDefaultValue = false)] + public DateTime CreatedOn + { + get; + set; + } + + /// + /// The package version. + /// + [DataMember(EmitDefaultValue = false)] + public PackageVersion Version + { + get; + set; + } + + /// + /// A direct link to download the package. + /// + [DataMember] + public String DownloadUrl + { + get; + set; + } + + /// + /// MD5 hash as a base64 string + /// + [DataMember(EmitDefaultValue = false)] + public String HashValue + { + get; + set; + } + + /// + /// A link to documentation + /// + [DataMember] + public String InfoUrl + { + get; + set; + } + + /// + /// The UI uses this to display instructions, i.e. 
"unzip MyAgent.zip" + /// + [DataMember] + public String Filename + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/PackageVersion.cs b/src/Sdk/DTWebApi/WebApi/PackageVersion.cs new file mode 100644 index 00000000000..2c92246c705 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/PackageVersion.cs @@ -0,0 +1,108 @@ +using System; +using System.Globalization; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class PackageVersion : IComparable, IEquatable + { + public PackageVersion() + { + } + + public PackageVersion(String version) + { + Int32 major, minor, patch; + String semanticVersion; + + VersionParser.ParseVersion(version, out major, out minor, out patch, out semanticVersion); + Major = major; + Minor = minor; + Patch = patch; + } + + public static Boolean TryParse(String versionStr, out PackageVersion version) + { + version = null; + + try + { + version = new PackageVersion(versionStr); + return true; + } + catch + { + return false; + } + } + + private PackageVersion(PackageVersion versionToClone) + { + this.Major = versionToClone.Major; + this.Minor = versionToClone.Minor; + this.Patch = versionToClone.Patch; + } + + [DataMember] + public Int32 Major + { + get; + set; + } + + [DataMember] + public Int32 Minor + { + get; + set; + } + + [DataMember] + public Int32 Patch + { + get; + set; + } + + public PackageVersion Clone() + { + return new PackageVersion(this); + } + + public static implicit operator String(PackageVersion version) + { + return version.ToString(); + } + + public override String ToString() + { + return String.Format(CultureInfo.InvariantCulture, "{0}.{1}.{2}", Major, Minor, Patch); + } + + public override int GetHashCode() + { + return this.ToString().GetHashCode(); + } + + public Int32 CompareTo(PackageVersion other) + { + Int32 rc = Major.CompareTo(other.Major); + if (rc == 0) + { + rc = Minor.CompareTo(other.Minor); + if (rc == 0) + { + rc = Patch.CompareTo(other.Patch); + } + } + + return rc; + } + + public Boolean Equals(PackageVersion other) + { + return this.CompareTo(other) == 0; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/PlanEnvironment.cs b/src/Sdk/DTWebApi/WebApi/PlanEnvironment.cs new file mode 100644 index 00000000000..8de5134d17a --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/PlanEnvironment.cs @@ -0,0 +1,156 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.DistributedTask.Pipelines; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class PlanEnvironment : IOrchestrationEnvironment + { + public PlanEnvironment() + { + } + + private PlanEnvironment(PlanEnvironment environmentToClone) + { + if (environmentToClone.m_options != null) + { + m_options = m_options.ToDictionary(x => x.Key, x => x.Value.Clone()); + } + + if (environmentToClone.m_maskHints != null) + { + m_maskHints = environmentToClone.m_maskHints.Select(x => x.Clone()).ToList(); + } + + if (environmentToClone.m_variables != null) + { + m_variables = new VariablesDictionary(environmentToClone.m_variables); + } + } + + /// + /// Gets the collection of mask hints + /// + public List MaskHints + { + get + { + if (m_maskHints == null) + { + m_maskHints = new List(); + } + return m_maskHints; + } + } + + /// + /// Gets the collection of options associated with the current context. 
+ /// + /// This is being deprecated and should not be used + public IDictionary Options + { + get + { + if (m_options == null) + { + m_options = new Dictionary(); + } + return m_options; + } + } + + /// + /// Gets the collection of variables associated with the current context. + /// + public IDictionary Variables + { + get + { + if (m_variables == null) + { + m_variables = new VariablesDictionary(); + } + return m_variables; + } + } + + OrchestrationProcessType IOrchestrationEnvironment.ProcessType + { + get + { + return OrchestrationProcessType.Container; + } + } + + IDictionary IOrchestrationEnvironment.Variables + { + get + { + if (m_variables == null) + { + m_variables = new VariablesDictionary(); + } + return m_variables; + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedOptions, ref m_options, true); + SerializationHelper.Copy(ref m_serializedMaskHints, ref m_maskHints, true); + + var secretNames = new HashSet(m_maskHints?.Where(x => x.Type == MaskType.Variable).Select(x => x.Value) ?? new String[0], StringComparer.OrdinalIgnoreCase); + if (m_serializedVariables != null && m_serializedVariables.Count > 0) + { + m_variables = new VariablesDictionary(); + foreach (var variable in m_serializedVariables) + { + m_variables[variable.Key] = new VariableValue(variable.Value, secretNames.Contains(variable.Key)); + } + } + + m_serializedVariables = null; + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedOptions = null; + m_serializedMaskHints = null; + m_serializedVariables = null; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_options, ref m_serializedOptions); + SerializationHelper.Copy(ref m_maskHints, ref m_serializedMaskHints); + + if (m_variables != null && m_variables.Count > 0) + { + m_serializedVariables = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var variable in m_variables) + { + m_serializedVariables[variable.Key] = variable.Value?.Value; + } + } + } + + private List m_maskHints; + private Dictionary m_options; + private VariablesDictionary m_variables; + + [DataMember(Name = "Mask", EmitDefaultValue = false)] + private List m_serializedMaskHints; + + [DataMember(Name = "Options", EmitDefaultValue = false)] + private Dictionary m_serializedOptions; + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private IDictionary m_serializedVariables; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/PlanGroupStatus.cs b/src/Sdk/DTWebApi/WebApi/PlanGroupStatus.cs new file mode 100644 index 00000000000..3ec90c1f315 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/PlanGroupStatus.cs @@ -0,0 +1,19 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [Flags] + [DataContract] + public enum PlanGroupStatus + { + [EnumMember] + Running = 1, + + [EnumMember] + Queued = 2, + + [EnumMember] + All = Running | Queued + } +} diff --git a/src/Sdk/DTWebApi/WebApi/PlanTemplateType.cs b/src/Sdk/DTWebApi/WebApi/PlanTemplateType.cs new file mode 100644 index 00000000000..96f5c4c35e0 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/PlanTemplateType.cs @@ -0,0 +1,19 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + public enum PlanTemplateType + { + [DataMember] + None = 0, + + [DataMember] + Designer = 1, + + [DataMember] + System = 2, + + [DataMember] + Yaml = 3, + } +} diff --git 
a/src/Sdk/DTWebApi/WebApi/ProjectReference.cs b/src/Sdk/DTWebApi/WebApi/ProjectReference.cs new file mode 100644 index 00000000000..cb379ebb20f --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ProjectReference.cs @@ -0,0 +1,21 @@ +// -------------------------------------------------------------------------------------------------------------------- +// +// 2012-2023, All rights reserved. +// +// -------------------------------------------------------------------------------------------------------------------- + +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class ProjectReference + { + [DataMember] + public Guid Id { get; set; } + + [DataMember] + public string Name { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/PublishTaskGroupMetadata.cs b/src/Sdk/DTWebApi/WebApi/PublishTaskGroupMetadata.cs new file mode 100644 index 00000000000..008342781f0 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/PublishTaskGroupMetadata.cs @@ -0,0 +1,20 @@ +// -------------------------------------------------------------------------------------------------------------------- +// +// 2012-2023, All rights reserved. +// +// -------------------------------------------------------------------------------------------------------------------- + +using System; + +namespace GitHub.DistributedTask.WebApi +{ + public class PublishTaskGroupMetadata + { + public Guid TaskGroupId { get; set; } + // This is revision of task group that is getting published + public int TaskGroupRevision { get; set; } + public int ParentDefinitionRevision { get; set; } + public Boolean Preview { get; set; } + public String Comment { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ResourceLimit.cs b/src/Sdk/DTWebApi/WebApi/ResourceLimit.cs new file mode 100644 index 00000000000..cd986695bbe --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ResourceLimit.cs @@ -0,0 +1,96 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class ResourceLimit + { + internal ResourceLimit( + Guid hostId, + String parallelismTag, + Boolean isHosted) + { + HostId = hostId; + ParallelismTag = parallelismTag; + IsHosted = isHosted; + } + + [DataMember] + public Guid HostId + { + get; + set; + } + + [DataMember] + public String ParallelismTag + { + get; + set; + } + + [DataMember] + public Boolean IsHosted + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Int32? TotalCount + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Int32? 
TotalMinutes + { + get; + set; + } + + [DataMember] + public Boolean IsPremium + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Boolean FailedToReachAllProviders + { + get; + set; + } + + public IDictionary Data + { + get + { + if (m_resourceLimitsData == null) + { + m_resourceLimitsData = new Dictionary(); + } + + return m_resourceLimitsData; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_resourceLimitsData?.Count == 0) + { + m_resourceLimitsData = null; + } + } + + [DataMember(Name = "ResourceLimitsData", EmitDefaultValue = false)] + private IDictionary m_resourceLimitsData; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ResourceUsage.cs b/src/Sdk/DTWebApi/WebApi/ResourceUsage.cs new file mode 100644 index 00000000000..053ad02bb43 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ResourceUsage.cs @@ -0,0 +1,58 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public class ResourceUsage + { + [DataMember] + public ResourceLimit ResourceLimit + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Int32? UsedCount + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Int32? UsedMinutes + { + get; + set; + } + + public IList RunningRequests + { + get + { + if (m_runningRequests == null) + { + m_runningRequests = new List(); + } + + return m_runningRequests; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_runningRequests?.Count == 0) + { + m_runningRequests = null; + } + } + + [DataMember(Name = "RunningRequests", EmitDefaultValue = false)] + private IList m_runningRequests; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/SecureFile.cs b/src/Sdk/DTWebApi/WebApi/SecureFile.cs new file mode 100644 index 00000000000..c9a40b2f3b8 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/SecureFile.cs @@ -0,0 +1,109 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class SecureFile + { + public SecureFile() + { + } + + private SecureFile(SecureFile secureFile, Boolean shallow = false) + { + this.Id = secureFile.Id; + this.Name = secureFile.Name; + this.Ticket = secureFile.Ticket; + + if (!shallow) + { + this.Properties = secureFile.Properties; + this.CreatedBy = secureFile.CreatedBy; + this.CreatedOn = secureFile.CreatedOn; + this.ModifiedBy = secureFile.ModifiedBy; + this.ModifiedOn = secureFile.ModifiedOn; + } + } + + [DataMember(EmitDefaultValue = false)] + public Guid Id + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + public IDictionary Properties + { + get + { + if (m_properties == null) + { + m_properties = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_properties; + } + set + { + m_properties = value; + } + } + + [DataMember(EmitDefaultValue = false)] + public IdentityRef CreatedBy + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public DateTime CreatedOn + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public IdentityRef ModifiedBy + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public DateTime ModifiedOn + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Ticket + { + get; + 
set; + } + + public SecureFile Clone() + { + return new SecureFile(this); + } + + public SecureFile CloneShallow() + { + return new SecureFile(this, true); + } + + [DataMember(EmitDefaultValue = false, Name = "Properties")] + private IDictionary m_properties; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/SecureFileActionFilter.cs b/src/Sdk/DTWebApi/WebApi/SecureFileActionFilter.cs new file mode 100644 index 00000000000..700a458ff6e --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/SecureFileActionFilter.cs @@ -0,0 +1,19 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [Flags] + [DataContract] + public enum SecureFileActionFilter + { + [EnumMember] + None = 0, + + [EnumMember] + Manage = 2, + + [EnumMember] + Use = 16, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServerTaskRequestMessage.cs b/src/Sdk/DTWebApi/WebApi/ServerTaskRequestMessage.cs new file mode 100644 index 00000000000..e4b62775c79 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServerTaskRequestMessage.cs @@ -0,0 +1,43 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class ServerTaskRequestMessage : JobRequestMessage + { + internal ServerTaskRequestMessage() + : base(JobRequestMessageTypes.ServerTaskRequest) + { + } + + public ServerTaskRequestMessage( + TaskOrchestrationPlanReference plan, + TimelineReference timeline, + Guid jobId, + String jobName, + String jobRefName, + JobEnvironment environment, + TaskInstance taskInstance, + TaskDefinition taskDefinition) + : base(JobRequestMessageTypes.ServerJobRequest, plan, timeline, jobId, jobName, jobRefName, environment) + { + TaskDefinition = taskDefinition; + TaskInstance = taskInstance; + } + + [DataMember] + public TaskDefinition TaskDefinition + { + get; + private set; + } + + [DataMember] + public TaskInstance TaskInstance + { + get; + private set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServerTaskSectionExecutionOutput.cs b/src/Sdk/DTWebApi/WebApi/ServerTaskSectionExecutionOutput.cs new file mode 100644 index 00000000000..8cba67674bf --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServerTaskSectionExecutionOutput.cs @@ -0,0 +1,14 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + [ClientIgnore] + public class ServerTaskSectionExecutionOutput + { + [DataMember] + public Boolean? IsCompleted { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AadLoginPromptOption.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AadLoginPromptOption.cs new file mode 100644 index 00000000000..b13639f9e01 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AadLoginPromptOption.cs @@ -0,0 +1,46 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public enum AadLoginPromptOption + { + /// + /// Do not provide a prompt option + /// + [EnumMember] + NoOption = 0, + + /// + /// Force the user to login again. + /// + [EnumMember] + Login = 1, + + /// + /// Force the user to select which account they are logging in with instead of + /// automatically picking the user up from the session state. + /// NOTE: This does not work for switching bewtween the variants of a dual-homed user. + /// + [EnumMember] + SelectAccount = 2, + + /// + /// Force the user to login again. + /// + /// Ignore current authentication state and force the user to authenticate again. 
This option should be used instead of Login. + /// + /// + [EnumMember] + FreshLogin = 3, + + /// + /// Force the user to login again with mfa. + /// + /// Ignore current authentication state and force the user to authenticate again. This option should be used instead of Login, if MFA is required. + /// + /// + [EnumMember] + FreshLoginWithMfa = 4 + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AadOauthTokenRequest.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AadOauthTokenRequest.cs new file mode 100644 index 00000000000..f2de2c74d4c --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AadOauthTokenRequest.cs @@ -0,0 +1,21 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class AadOauthTokenRequest + { + [DataMember] + public String Token { get; set; } + + [DataMember] + public String Resource { get; set; } + + [DataMember] + public String TenantId { get; set; } + + [DataMember] + public Boolean Refresh { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AadOauthTokenResult.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AadOauthTokenResult.cs new file mode 100644 index 00000000000..89dc4d83429 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AadOauthTokenResult.cs @@ -0,0 +1,15 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class AadOauthTokenResult + { + [DataMember] + public String AccessToken { get; set; } + + [DataMember] + public String RefreshTokenCache { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureKeyVaultPermission.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureKeyVaultPermission.cs new file mode 100644 index 00000000000..ed53bd30513 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureKeyVaultPermission.cs @@ -0,0 +1,16 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class AzureKeyVaultPermission : AzureResourcePermission + { + [DataMember] + public String Vault { get; set; } + + public AzureKeyVaultPermission() : base(AzurePermissionResourceProviders.AzureKeyVaultPermission) + { + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureManagementGroup.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureManagementGroup.cs new file mode 100644 index 00000000000..db603f1e357 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureManagementGroup.cs @@ -0,0 +1,40 @@ +using System; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Azure Management Group + /// + [DataContract] + public class AzureManagementGroup + { + /// + /// Azure management group name + /// + [DataMember] + [JsonProperty(PropertyName = "Name")] + public String Name { get; set; } + + /// + /// Id of azure management group + /// + [DataMember] + [JsonProperty(PropertyName = "Id")] + public String Id { get; set; } + + /// + /// Display name of azure management group + /// + [DataMember] + [JsonProperty(PropertyName = "displayName")] + public String DisplayName { get; set; } + + /// + /// Id of tenant from which azure management group belogs + /// + [DataMember] + public String TenantId { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureManagementGroupQueryResult.cs 
b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureManagementGroupQueryResult.cs new file mode 100644 index 00000000000..10e61b42146 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureManagementGroupQueryResult.cs @@ -0,0 +1,26 @@ +using System.Collections.Generic; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Azure management group query result + /// + [DataContract] + public class AzureManagementGroupQueryResult + { + /// + /// List of azure management groups + /// + [DataMember] + [JsonProperty("value")] + public List Value; + + /// + /// Error message in case of an exception + /// + [DataMember] + public string ErrorMessage; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzurePermission.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzurePermission.cs new file mode 100644 index 00000000000..13032068f52 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzurePermission.cs @@ -0,0 +1,130 @@ +using System; +using System.Reflection; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [JsonConverter(typeof(AzurePermissionJsonConverter))] + [KnownType(typeof(AzureKeyVaultPermission))] + [DataContract] + public abstract class AzurePermission + { + [DataMember] + public String ResourceProvider { get; set; } + + [DataMember(EmitDefaultValue = true)] + public Boolean Provisioned { get; set; } + + internal AzurePermission(String resourceProvider) + { + this.ResourceProvider = resourceProvider; + } + } + + internal sealed class AzurePermissionJsonConverter : VssSecureJsonConverter + { + public override Boolean CanRead + { + get + { + return true; + } + } + + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Boolean CanConvert(Type objectType) + { + return typeof(AzurePermission).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader == null) + { + throw new ArgumentNullException("reader"); + } + + if (serializer == null) + { + throw new ArgumentNullException("serializer"); + } + + if (reader.TokenType != JsonToken.StartObject) + { + return existingValue; + } + + var contract = serializer.ContractResolver.ResolveContract(objectType) as JsonObjectContract; + if (contract == null) + { + return existingValue; + } + + JsonProperty resourceProviderProperty = contract.Properties.GetClosestMatchProperty("ResourceProvider"); + if (resourceProviderProperty == null) + { + return existingValue; + } + + JToken itemTypeValue; + JObject value = JObject.Load(reader); + + if (!value.TryGetValue(resourceProviderProperty.PropertyName, StringComparison.OrdinalIgnoreCase, out itemTypeValue)) + { + return existingValue; + } + + if (itemTypeValue.Type != JTokenType.String) + { + throw new NotSupportedException("ResourceProvider property is mandatory for azure permission"); + } + + string resourceProvider = (string)itemTypeValue; + AzurePermission returnValue = null; + switch (resourceProvider) + { + case AzurePermissionResourceProviders.AzureRoleAssignmentPermission: + returnValue = new AzureRoleAssignmentPermission(); + break; + case AzurePermissionResourceProviders.AzureKeyVaultPermission: + returnValue = new AzureKeyVaultPermission(); + break; 
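+ // Only the RoleAssignment and KeyVault resource providers are recognized; any other value falls through to the default branch below and is rejected.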
+ default: + throw new NotSupportedException($"{resourceProvider} is not a supported resource provider for azure permission"); + } + + if (returnValue != null) + { + using (JsonReader objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, returnValue); + } + } + + return returnValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzurePermissionResourceProviders.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzurePermissionResourceProviders.cs new file mode 100644 index 00000000000..0401d87cd36 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzurePermissionResourceProviders.cs @@ -0,0 +1,12 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.WebApi +{ + [GenerateAllConstants] + public static class AzurePermissionResourceProviders + { + public const String AzureRoleAssignmentPermission = "Microsoft.RoleAssignment"; + public const String AzureKeyVaultPermission = "Microsoft.KeyVault"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureResourcePermission.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureResourcePermission.cs new file mode 100644 index 00000000000..60bdc4c066a --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureResourcePermission.cs @@ -0,0 +1,16 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public abstract class AzureResourcePermission : AzurePermission + { + [DataMember] + public String ResourceGroup { get; set; } + + protected AzureResourcePermission(String resourceProvider) : base(resourceProvider) + { + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureRoleAssignmentPermission.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureRoleAssignmentPermission.cs new file mode 100644 index 00000000000..a608ae89e6d --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureRoleAssignmentPermission.cs @@ -0,0 +1,17 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class AzureRoleAssignmentPermission : AzurePermission + { + + [DataMember] + public Guid RoleAssignmentId { get; set; } + + public AzureRoleAssignmentPermission() : base(AzurePermissionResourceProviders.AzureRoleAssignmentPermission) + { + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureSubscription.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureSubscription.cs new file mode 100644 index 00000000000..ec592bbb21e --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureSubscription.cs @@ -0,0 +1,24 @@ +using System; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class AzureSubscription + { + [DataMember] + [JsonProperty(PropertyName = "displayName")] + public String DisplayName { get; set; } + + [DataMember] + [JsonProperty(PropertyName = "subscriptionId")] + public String SubscriptionId { get; set; } + + [DataMember] + public String SubscriptionTenantId { get; set; } + + [DataMember] + public String SubscriptionTenantName { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureSubscriptionQueryResult.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureSubscriptionQueryResult.cs new 
file mode 100644 index 00000000000..998c44b9d5d --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/AzureSubscriptionQueryResult.cs @@ -0,0 +1,17 @@ +using System.Collections.Generic; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class AzureSubscriptionQueryResult + { + [DataMember] + [JsonProperty("value")] + public List Value; + + [DataMember] + public string ErrorMessage; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/DataSourceBinding.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/DataSourceBinding.cs new file mode 100644 index 00000000000..e80fb9f137c --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/DataSourceBinding.cs @@ -0,0 +1,29 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +using CommonContracts = GitHub.DistributedTask.Common.Contracts; + +namespace GitHub.DistributedTask.WebApi +{ + public class DataSourceBinding : CommonContracts.DataSourceBindingBase + { + public DataSourceBinding() + : base() + { + } + + private DataSourceBinding(DataSourceBinding inputDefinitionToClone) + : base(inputDefinitionToClone) + { + + } + + public DataSourceBinding Clone() + { + return new DataSourceBinding(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/EndpointAuthorization.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/EndpointAuthorization.cs new file mode 100644 index 00000000000..dff449c353b --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/EndpointAuthorization.cs @@ -0,0 +1,120 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + public sealed class EndpointAuthorizationSchemes + { + public const String AzureStorage = "AzureStorage"; + public const String OAuth = "OAuth"; + public const String OAuth2 = "OAuth2"; + public const String OAuthWrap = "OAuthWrap"; + public const String Certificate = "Certificate"; + public const String UsernamePassword = "UsernamePassword"; + public const String Token = "Token"; + public const String PersonalAccessToken = "PersonalAccessToken"; + public const String ServicePrincipal = "ServicePrincipal"; + public const String None = "None"; + public const String Jwt = "JWT"; + public const String InstallationToken = "InstallationToken"; + } + + public sealed class EndpointAuthorizationParameters + { + public const String Username = "Username"; + public const String Password = "Password"; + public const String Certificate = "Certificate"; + public const String AccessToken = "AccessToken"; + public const String ApiToken = "ApiToken"; + public const String RefreshToken = "RefreshToken"; + public const String ServicePrincipalId = "ServicePrincipalId"; + public const String ServicePrincipalKey = "ServicePrincipalKey"; + public const String TenantId = "TenantId"; + public const String RealmName = "RealmName"; + public const String IdToken = "IdToken"; + public const String Nonce = "nonce"; + public const String Scope = "Scope"; + public const String Role = "Role"; + public const String ServerCertThumbprint = "ServerCertThumbprint"; + public const String CompleteCallbackPayload = "CompleteCallbackPayload"; + public const String ClientMail = "ClientMail"; + public const String PrivateKey = "PrivateKey"; + public const String Issuer = "Issuer"; + public const String Audience = "Audience"; + public const String 
StorageAccountName = "StorageAccountName"; + public const String StorageAccessKey = "StorageAccessKey"; + public const String AccessTokenType = "AccessTokenType"; + public const String Signature = "Signature"; + } + + [DataContract] + public sealed class EndpointAuthorization + { + public EndpointAuthorization() + { + } + + private EndpointAuthorization(EndpointAuthorization authorizationToClone) + { + this.Scheme = authorizationToClone.Scheme; + if (authorizationToClone.m_parameters != null && authorizationToClone.m_parameters.Count > 0) + { + m_parameters = new Dictionary(authorizationToClone.m_parameters, StringComparer.OrdinalIgnoreCase); + } + } + + /// + /// Gets or sets the scheme used for service endpoint authentication. + /// + [DataMember] + public String Scheme + { + get; + set; + } + + public IDictionary Parameters + { + get + { + if (m_parameters == null) + { + m_parameters = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_parameters; + } + } + + public EndpointAuthorization Clone() + { + return new EndpointAuthorization(this); + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedParameters, ref m_parameters, StringComparer.OrdinalIgnoreCase, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_parameters, ref m_serializedParameters, StringComparer.OrdinalIgnoreCase); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedParameters = null; + } + + private IDictionary m_parameters; + + /// + /// Gets or sets the parameters for the selected authorization scheme. + /// + [DataMember(Name = "Parameters", EmitDefaultValue = false)] + private IDictionary m_serializedParameters; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/SerializationHelper.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/SerializationHelper.cs new file mode 100644 index 00000000000..ecf94c9968f --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/SerializationHelper.cs @@ -0,0 +1,107 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.WebApi +{ + public static class SerializationHelper + { + public static void Copy( + ref List source, + ref List target, + Boolean clearSource = false) + { + if (source != null && source.Count > 0) + { + target = new List(source); + } + + if (clearSource) + { + source = null; + } + } + + public static void Copy( + ref IList source, + ref ISet target, + IEqualityComparer comparer, + Boolean clearSource = false) + { + if (source != null && source.Count > 0) + { + target = new HashSet(source, comparer); + } + + if (clearSource) + { + source = null; + } + } + + public static void Copy( + ref ISet source, + ref IList target, + Boolean clearSource = false) + { + if (source != null && source.Count > 0) + { + target = new List(source); + } + + if (clearSource) + { + source = null; + } + } + + public static void Copy( + ref Dictionary source, + ref Dictionary target, + Boolean clearSource = false) + { + Copy(ref source, ref target, EqualityComparer.Default, clearSource); + } + + public static void Copy( + ref IDictionary source, + ref IDictionary target, + Boolean clearSource = false) + { + Copy(ref source, ref target, EqualityComparer.Default, clearSource); + } + + public static void Copy( + ref Dictionary source, + ref Dictionary target, + IEqualityComparer comparer, + Boolean clearSource = false) + { + if (source != 
null && source.Count > 0) + { + target = new Dictionary(source, comparer); + } + + if (clearSource) + { + source = null; + } + } + + public static void Copy( + ref IDictionary source, + ref IDictionary target, + IEqualityComparer comparer, + Boolean clearSource = false) + { + if (source != null && source.Count > 0) + { + target = new Dictionary(source, comparer); + } + + if (clearSource) + { + source = null; + } + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/ServiceEndpoint.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/ServiceEndpoint.cs new file mode 100644 index 00000000000..3eeca3525d5 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/ServiceEndpoint.cs @@ -0,0 +1,296 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using GitHub.Services.Common.Internal; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Represents an endpoint which may be used by an orchestration job. + /// + [DataContract] + public class ServiceEndpoint + { + /// + /// Constructs an ServiceEndpoint instance with empty values. + /// + public ServiceEndpoint() + { + m_data = new Dictionary(StringComparer.OrdinalIgnoreCase); + IsReady = true; + } + + private ServiceEndpoint(ServiceEndpoint endpointToClone) + { + Id = endpointToClone.Id; + Name = endpointToClone.Name; + Type = endpointToClone.Type; + Url = endpointToClone.Url; + Description = endpointToClone.Description; + GroupScopeId = endpointToClone.GroupScopeId; + AdministratorsGroup = endpointToClone.AdministratorsGroup; + ReadersGroup = endpointToClone.ReadersGroup; + + if (endpointToClone.Authorization != null) + { + Authorization = endpointToClone.Authorization.Clone(); + } + + if (endpointToClone.m_data != null) + { + m_data = new Dictionary(endpointToClone.m_data, StringComparer.OrdinalIgnoreCase); + } + } + + public static bool ValidateServiceEndpoint(ServiceEndpoint endpoint, ref string message) + { + if (endpoint == null) + { + message = "endpoint: null"; + return false; + } + + if (endpoint.Id == Guid.Empty) + { + message = CommonResources.EmptyGuidNotAllowed("endpoint.Id"); + return false; + } + + if (string.IsNullOrEmpty(endpoint.Name)) + { + message = string.Format("{0}:{1}", CommonResources.EmptyStringNotAllowed(), "endpoint.Name"); + return false; + } + + if (endpoint.Url == null) + { + message = "endpoint.Url: null"; + return false; + } + + if (string.IsNullOrEmpty(endpoint.Type)) + { + message = string.Format("{0}:{1}", CommonResources.EmptyStringNotAllowed(), "endpoint.Type"); + return false; + } + + if (endpoint.Authorization == null) + { + message = "endpoint.Authorization: null"; + return false; + } + + return true; + } + + /// + /// Gets or sets the identifier of this endpoint. + /// + [DataMember(EmitDefaultValue = false)] + public Guid Id + { + get; + set; + } + + /// + /// Gets or sets the friendly name of the endpoint. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Gets or sets the type of the endpoint. + /// + [DataMember(EmitDefaultValue = false)] + public String Type + { + get; + set; + } + + /// + /// Gets or sets the owner of the endpoint. + /// + [DataMember(EmitDefaultValue = false)] + public String Owner + { + get; + set; + } + + /// + /// Gets or sets the url of the endpoint. 
+ /// + [DataMember(EmitDefaultValue = false)] + public Uri Url + { + get; + set; + } + + /// + /// Gets or sets the identity reference for the user who created the service endpoint. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef CreatedBy + { + get; + set; + } + + + /// + /// Gets or sets the description of the endpoint. + /// + [DataMember(EmitDefaultValue = false)] + public string Description + { + get; + set; + } + + /// + /// Gets or sets the authorization data for talking to the endpoint. + /// + [DataMember(EmitDefaultValue = false)] + public EndpointAuthorization Authorization + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Guid GroupScopeId + { + get; + internal set; + } + + /// + /// Gets or sets the identity reference for the administrators group of the service endpoint. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef AdministratorsGroup + { + get; + internal set; + } + + /// + /// Gets or sets the identity reference for the readers group of the service endpoint. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef ReadersGroup + { + get; + internal set; + } + + /// + /// Gets the custom data associated with this endpoint. + /// + public IDictionary Data + { + get + { + return m_data; + } + + set + { + if (value != null) + { + m_data = new Dictionary(value, StringComparer.OrdinalIgnoreCase); + } + } + } + + /// + /// Indicates whether the service endpoint is shared with other projects. + /// + [DataMember(EmitDefaultValue = true)] + public Boolean IsShared + { + get; + set; + } + + /// + /// Endpoint state indicator + /// + [DataMember(EmitDefaultValue = true)] + [JsonConverter(typeof(EndpointIsReadyConverter))] + public bool IsReady + { + get; + set; + } + + /// + /// Error message during creation/deletion of the endpoint + /// + [DataMember(EmitDefaultValue = false)] + public JObject OperationStatus + { + get; + set; + } + + /// + /// Performs a deep clone of the ServiceEndpoint instance. + /// + /// A new ServiceEndpoint instance identical to the current instance + public ServiceEndpoint Clone() + { + return new ServiceEndpoint(this); + } + + [DataMember(EmitDefaultValue = false, Name = "Data")] + private Dictionary m_data; + } + + internal class EndpointIsReadyConverter : JsonConverter + { + public override bool CanConvert(Type objectType) + { + // we are converting every non-assignable thing to true + return true; + } + + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + if (reader.TokenType == JsonToken.Boolean || reader.TokenType == JsonToken.Integer) + { + return serializer.Deserialize(reader); + } + else if (reader.TokenType == JsonToken.String) + { + var s = (string)reader.Value; + + if (s.Equals("false", StringComparison.OrdinalIgnoreCase) || s.Equals("0", StringComparison.OrdinalIgnoreCase)) + { + return false; + } + + return true; + } + else + { + return true; + } + } + + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + writer.WriteValue((bool)value ? 
true : false); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/ServiceEndpointTypes.cs b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/ServiceEndpointTypes.cs new file mode 100644 index 00000000000..4fcf8f97bd8 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ServiceEndpointLegacy/ServiceEndpointTypes.cs @@ -0,0 +1,91 @@ +using System; + +namespace GitHub.DistributedTask.WebApi +{ + public static class ServiceEndpointTypes + { + /// + /// Azure endpoint + /// + public const String Azure = "Azure"; + + /// + /// Chef endpoint + /// + public const String Chef = "Chef"; + + /// External TFS endpoint + /// + public const String ExternalTfs = "ExternalTfs"; + + /// + /// Generic endpoint + /// + public const String Generic = "Generic"; + + /// + /// GitHub endpoint + /// + public const String GitHub = "GitHub"; + + /// + /// GitHub Enterprise endpoint + /// + public const String GitHubEnterprise = "GitHubEnterprise"; + + /// + /// Bitbucket endpoint + /// + public const String Bitbucket = "Bitbucket"; + + /// + /// SSH endpoint + /// + public const String SSH = "SSH"; + + /// + /// Subversion endpoint + /// + public const String Subversion = "Subversion"; + + /// + /// GCP endpoint + /// + public const String Gcp = "google-cloud"; + + /// + /// Jenkins endpoint + /// + public const String Jenkins = "Jenkins"; + + /// + /// External Git repository + /// + public const String ExternalGit = "Git"; + + /// + /// Azure RM endpoint + /// + public const String AzureRM = "AzureRM"; + + /// + /// Azure Deployment Manager + /// + public const String AzureDeploymentManager = "AzureDeploymentManager"; + + /// + /// Azure Service Fabric + /// + public const String AzureServiceFabric = "ServiceFabric"; + + /// + /// Docker registry endpoint + /// + public const String Docker = "dockerregistry"; + + /// + /// Jira + /// + public const String Jira = "Jira"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgent.cs b/src/Sdk/DTWebApi/WebApi/TaskAgent.cs new file mode 100644 index 00000000000..99aa704af05 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgent.cs @@ -0,0 +1,223 @@ +using GitHub.Services.WebApi; +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + public static class AgentConstants + { + // 1.x agent has been deprecated. + public static readonly String Version = "0.0.0"; + } + + /// + /// A task agent. 
+ /// + [DataContract] + public class TaskAgent : TaskAgentReference, ICloneable + { + internal TaskAgent() + { + this.ProvisioningState = TaskAgentProvisioningStateConstants.Provisioned; + } + + public TaskAgent(String name) + { + this.Name = name; + this.ProvisioningState = TaskAgentProvisioningStateConstants.Provisioned; + } + + internal TaskAgent(TaskAgentReference reference) + : base(reference) + { + } + + private TaskAgent(TaskAgent agentToBeCloned) + : base(agentToBeCloned) + { + this.CreatedOn = agentToBeCloned.CreatedOn; + this.MaxParallelism = agentToBeCloned.MaxParallelism; + this.StatusChangedOn = agentToBeCloned.StatusChangedOn; + + if (agentToBeCloned.AssignedRequest != null) + { + this.AssignedRequest = agentToBeCloned.AssignedRequest.Clone(); + } + + if (agentToBeCloned.Authorization != null) + { + this.Authorization = agentToBeCloned.Authorization.Clone(); + } + + if (agentToBeCloned.m_properties != null && agentToBeCloned.m_properties.Count > 0) + { + m_properties = new PropertiesCollection(agentToBeCloned.m_properties); + } + + if (agentToBeCloned.m_systemCapabilities != null && agentToBeCloned.m_systemCapabilities.Count > 0) + { + m_systemCapabilities = new Dictionary(agentToBeCloned.m_systemCapabilities, StringComparer.OrdinalIgnoreCase); + } + + if (agentToBeCloned.m_userCapabilities != null && agentToBeCloned.m_userCapabilities.Count > 0) + { + m_userCapabilities = new Dictionary(agentToBeCloned.m_userCapabilities, StringComparer.OrdinalIgnoreCase); + } + + if (agentToBeCloned.PendingUpdate != null) + { + this.PendingUpdate = agentToBeCloned.PendingUpdate.Clone(); + } + } + + /// + /// Maximum job parallelism allowed for this agent. + /// + [DataMember] + public Int32? MaxParallelism + { + get; + set; + } + + /// + /// Date on which this agent was created. + /// + [DataMember] + public DateTime CreatedOn + { + get; + internal set; + } + + /// + /// Date on which the last connectivity status change occurred. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? StatusChangedOn + { + get; + internal set; + } + + /// + /// The request which is currently assigned to this agent. + /// + [DataMember(EmitDefaultValue = false)] + public TaskAgentJobRequest AssignedRequest + { + get; + internal set; + } + + /// + /// The last request which was completed by this agent. + /// + [DataMember(EmitDefaultValue = false)] + public TaskAgentJobRequest LastCompletedRequest + { + get; + internal set; + } + + /// + /// Authorization information for this agent. + /// + [DataMember(EmitDefaultValue = false)] + public TaskAgentAuthorization Authorization + { + get; + set; + } + + /// + /// Pending update for this agent. + /// + [DataMember(EmitDefaultValue = false)] + public TaskAgentUpdate PendingUpdate + { + get; + internal set; + } + + /// + /// The agent cloud request that's currently associated with this agent. + /// + [DataMember(EmitDefaultValue = false)] + public TaskAgentCloudRequest AssignedAgentCloudRequest + { + get; + internal set; + } + + /// + /// System-defined capabilities supported by this agent's host. + /// + public IDictionary SystemCapabilities + { + get + { + if (m_systemCapabilities == null) + { + m_systemCapabilities = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_systemCapabilities; + } + } + + /// + /// User-defined capabilities supported by this agent's host. 
+ /// + public IDictionary UserCapabilities + { + get + { + if (m_userCapabilities == null) + { + m_userCapabilities = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_userCapabilities; + } + } + + /// + /// Properties which may be used to extend the storage fields available + /// for a given machine instance. + /// + public PropertiesCollection Properties + { + get + { + if (m_properties == null) + { + m_properties = new PropertiesCollection(); + } + return m_properties; + } + internal set + { + m_properties = value; + } + } + + Object ICloneable.Clone() + { + return this.Clone(); + } + + public new TaskAgent Clone() + { + return new TaskAgent(this); + } + + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "Properties")] + private PropertiesCollection m_properties; + + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "SystemCapabilities")] + private Dictionary m_systemCapabilities; + + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "UserCapabilities")] + private Dictionary m_userCapabilities; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentAuthorization.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentAuthorization.cs new file mode 100644 index 00000000000..f9ee40046f6 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentAuthorization.cs @@ -0,0 +1,69 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Provides data necessary for authorizing the agent using OAuth 2.0 authentication flows. + /// + [DataContract] + public sealed class TaskAgentAuthorization + { + /// + /// Initializes a new TaskAgentAuthorization instance with default values. + /// + public TaskAgentAuthorization() + { + } + + private TaskAgentAuthorization(TaskAgentAuthorization objectToBeCloned) + { + this.AuthorizationUrl = objectToBeCloned.AuthorizationUrl; + this.ClientId = objectToBeCloned.ClientId; + + if (objectToBeCloned.PublicKey != null) + { + this.PublicKey = objectToBeCloned.PublicKey.Clone(); + } + } + + /// + /// Endpoint used to obtain access tokens from the configured token service. + /// + [DataMember(EmitDefaultValue = false)] + public Uri AuthorizationUrl + { + get; + set; + } + + /// + /// Client identifier for this agent. + /// + [DataMember(EmitDefaultValue = false)] + public Guid ClientId + { + get; + set; + } + + /// + /// Public key used to verify the identity of this agent. + /// + [DataMember(EmitDefaultValue = false)] + public TaskAgentPublicKey PublicKey + { + get; + set; + } + + /// + /// Creates a deep copy of the authorization data. 
+ /// + /// A new TaskAgentAuthorization instance copied from the current instance + public TaskAgentAuthorization Clone() + { + return new TaskAgentAuthorization(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentCloud.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentCloud.cs new file mode 100644 index 00000000000..0462c95c06a --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentCloud.cs @@ -0,0 +1,156 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAgentCloud + { + private TaskAgentCloud(TaskAgentCloud cloudToBeCloned) + { + this.Id = cloudToBeCloned.Id; + this.AgentCloudId = cloudToBeCloned.AgentCloudId; + this.Name = cloudToBeCloned.Name; + this.AcquireAgentEndpoint = cloudToBeCloned.AcquireAgentEndpoint; + this.ReleaseAgentEndpoint = cloudToBeCloned.ReleaseAgentEndpoint; + this.SharedSecret = cloudToBeCloned.SharedSecret; + this.Internal = cloudToBeCloned.Internal; + + if (cloudToBeCloned.GetAgentDefinitionEndpoint != null) + { + this.GetAgentDefinitionEndpoint = cloudToBeCloned.GetAgentDefinitionEndpoint; + } + + if (cloudToBeCloned.GetAgentRequestStatusEndpoint != null) + { + this.GetAgentRequestStatusEndpoint = cloudToBeCloned.GetAgentRequestStatusEndpoint; + } + + if (cloudToBeCloned.AcquisitionTimeout != null) + { + this.AcquisitionTimeout = cloudToBeCloned.AcquisitionTimeout; + } + + if (cloudToBeCloned.GetAccountParallelismEndpoint != null) + { + this.GetAccountParallelismEndpoint = cloudToBeCloned.GetAccountParallelismEndpoint; + } + + if (cloudToBeCloned.MaxParallelism != null) + { + this.MaxParallelism = cloudToBeCloned.MaxParallelism; + } + } + + public TaskAgentCloud() + { + } + + //Id is used for interacting with pool providers, AgentCloudId is internal Id + + [DataMember] + public Guid Id + { + get; + set; + } + + [DataMember] + public Int32 AgentCloudId + { + get; + set; + } + + [DataMember] + public String Name + { + get; + set; + } + + /// + /// Gets or sets the type of the endpoint. + /// + [DataMember(EmitDefaultValue = false)] + public String Type + { + get; + set; + } + + /// + /// Signifies that this Agent Cloud is internal and should not be user-manageable + /// + [DataMember(EmitDefaultValue = false)] + public Boolean Internal + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String SharedSecret + { + get; + set; + } + + /// + /// Gets or sets a AcquireAgentEndpoint using which a request can be made to acquire new agent + /// + [DataMember] + public String AcquireAgentEndpoint + { + get; + set; + } + + [DataMember] + public String ReleaseAgentEndpoint + { + get; + set; + } + + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public String GetAgentDefinitionEndpoint + { + get; + set; + } + + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public String GetAgentRequestStatusEndpoint + { + get; + set; + } + + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public Int32? AcquisitionTimeout + { + get; + set; + } + + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public String GetAccountParallelismEndpoint + { + get; + set; + } + + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public Int32? 
MaxParallelism + { + get; + set; + } + + public TaskAgentCloud Clone() + { + return new TaskAgentCloud(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentCloudRequest.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentCloudRequest.cs new file mode 100644 index 00000000000..f5917fce750 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentCloudRequest.cs @@ -0,0 +1,116 @@ +using System; +using System.Runtime.Serialization; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAgentCloudRequest + { + private TaskAgentCloudRequest(TaskAgentCloudRequest requestToBeCloned) + { + this.AgentCloudId = requestToBeCloned.AgentCloudId; + this.RequestId = requestToBeCloned.RequestId; + this.AgentSpecification = requestToBeCloned.AgentSpecification; + this.ProvisionRequestTime = requestToBeCloned.ProvisionRequestTime; + this.ProvisionedTime = requestToBeCloned.ProvisionedTime; + this.AgentConnectedTime = requestToBeCloned.AgentConnectedTime; + this.ReleaseRequestTime = requestToBeCloned.ReleaseRequestTime; + + if (requestToBeCloned.AgentData != null) + { + this.AgentData = new JObject(requestToBeCloned.AgentData); + } + + if (requestToBeCloned.Pool != null) + { + this.Pool = requestToBeCloned.Pool.Clone(); + } + + if(requestToBeCloned.Agent != null) + { + this.Agent = requestToBeCloned.Agent.Clone(); + } + } + + public TaskAgentCloudRequest() + { + } + + [DataMember] + public Int32 AgentCloudId + { + get; + set; + } + + [DataMember] + public Guid RequestId + { + get; + set; + } + + + [DataMember(EmitDefaultValue = false)] + public TaskAgentPoolReference Pool + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TaskAgentReference Agent + { + get; + set; + } + + [DataMember] + public JObject AgentSpecification + { + get; + set; + } + + [DataMember] + public JObject AgentData + { + get; + set; + } + + [DataMember] + public DateTime? ProvisionRequestTime + { + get; + set; + } + + [DataMember] + public DateTime? ProvisionedTime + { + get; + set; + } + + [DataMember] + public DateTime? AgentConnectedTime + { + get; + set; + } + + [DataMember] + public DateTime? ReleaseRequestTime + { + get; + set; + } + + public TaskAgentCloudRequest Clone() + { + return new TaskAgentCloudRequest(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentCloudType.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentCloudType.cs new file mode 100644 index 00000000000..b397cb73e92 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentCloudType.cs @@ -0,0 +1,51 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.FormInput; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAgentCloudType + { + + public TaskAgentCloudType() + { + } + + /// + /// Gets or sets the name of agent cloud type. + /// + [DataMember] + public String Name + { + get; + set; + } + + /// + /// Gets or sets the display name of agnet cloud type. + /// + [DataMember(EmitDefaultValue = false)] + public String DisplayName { get; set; } + + public List InputDescriptors + { + get + { + return m_inputDescriptors ?? 
(m_inputDescriptors = new List()); + } + + set + { + m_inputDescriptors = value; + } + } + + /// + /// Gets or sets the input descriptors + /// + [DataMember(EmitDefaultValue = false, Name = "InputDescriptors")] + private List m_inputDescriptors; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentDelaySource.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentDelaySource.cs new file mode 100644 index 00000000000..5b6e87c5e34 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentDelaySource.cs @@ -0,0 +1,48 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAgentDelaySource : ICloneable + { + public TaskAgentDelaySource(TaskAgentReference taskAgent, IEnumerable delays) + { + TaskAgent = taskAgent; + Delays = delays.ToList(); + } + + [DataMember] + public TaskAgentReference TaskAgent { get; } + + [DataMember] + public List Delays { get; } + + public TimeSpan TotalDelay + { + get + { + if (!m_delay.HasValue) + { + m_delay = Delays.Aggregate(TimeSpan.Zero, (sum, next) => sum + next); + } + + return m_delay.Value; + } + } + + private TimeSpan? m_delay; + + Object ICloneable.Clone() + { + return this.Clone(); + } + + public TaskAgentDelaySource Clone() + { + return new TaskAgentDelaySource(TaskAgent, new List(Delays)); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentHttpClient.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentHttpClient.cs new file mode 100644 index 00000000000..908227ccb22 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentHttpClient.cs @@ -0,0 +1,722 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.Common.Diagnostics; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + [ResourceArea(TaskResourceIds.AreaId)] + public class TaskAgentHttpClient : TaskAgentHttpClientBase + { + public TaskAgentHttpClient( + Uri baseUrl, + VssCredentials credentials) + : base(baseUrl, credentials) + { + } + + public TaskAgentHttpClient( + Uri baseUrl, + VssCredentials credentials, + VssHttpRequestSettings settings) + : base(baseUrl, credentials, settings) + { + } + + public TaskAgentHttpClient( + Uri baseUrl, + VssCredentials credentials, + params DelegatingHandler[] handlers) + : base(baseUrl, credentials, handlers) + { + } + + public TaskAgentHttpClient( + Uri baseUrl, + VssCredentials credentials, + VssHttpRequestSettings settings, + params DelegatingHandler[] handlers) + : base(baseUrl, credentials, settings, handlers) + { + } + + public TaskAgentHttpClient( + Uri baseUrl, + HttpMessageHandler pipeline, + Boolean disposeHandler) + : base(baseUrl, pipeline, disposeHandler) + { + } + + public Task FinishAgentRequestAsync( + Int32 poolId, + Int64 requestId, + Guid lockToken, + DateTime finishTime, + TaskResult result, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + var request = new TaskAgentJobRequest + { + RequestId = requestId, + FinishTime = finishTime, + Result = result, + }; + + return UpdateAgentRequestAsync(poolId, requestId, lockToken, request, userState, cancellationToken); + } + + public Task> GetAgentsAsync( + int poolId, + string agentName = null, + bool? includeCapabilities = null, + bool? 
includeAssignedRequest = null, + bool? includeLastCompletedRequest = null, + IEnumerable propertyFilters = null, + IEnumerable demands = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + IEnumerable demandStrings = null; + if (demands != null) + { + demandStrings = demands.Select(d => d.ToString()); + } + return GetAgentsAsync(poolId, agentName, includeCapabilities, includeAssignedRequest, includeLastCompletedRequest, propertyFilters, demandStrings, userState, cancellationToken); + } + + /// + /// [Preview API] Get a secure file + /// + /// Project ID + /// The unique secure file Id + /// If includeDownloadTicket is true and the caller has permissions, a download ticket is included in the response. + /// + /// The cancellation token to cancel operation. + public virtual Task GetSecureFileAsync( + Guid project, + Guid secureFileId, + bool? includeDownloadTicket = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return GetSecureFileAsync(project, secureFileId, includeDownloadTicket, actionFilter: null, userState: userState, cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get secure files + /// + /// Project ID or project name + /// Name of the secure file to match. Can include wildcards to match multiple files. + /// If includeDownloadTickets is true and the caller has permissions, a download ticket for each secure file is included in the response. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetSecureFilesAsync( + string project, + string namePattern = null, + bool? includeDownloadTickets = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return GetSecureFilesAsync(project, namePattern, includeDownloadTickets, actionFilter: null, userState: userState, cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get secure files + /// + /// Project ID + /// Name of the secure file to match. Can include wildcards to match multiple files. + /// If includeDownloadTickets is true and the caller has permissions, a download ticket for each secure file is included in the response. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetSecureFilesAsync( + Guid project, + string namePattern = null, + bool? includeDownloadTickets = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return GetSecureFilesAsync(project, namePattern, includeDownloadTickets, actionFilter: null, userState: userState, cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get secure files + /// + /// Project ID or project name + /// A list of secure file Ids + /// If includeDownloadTickets is true and the caller has permissions, a download ticket for each secure file is included in the response. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetSecureFilesByIdsAsync( + string project, + IEnumerable secureFileIds, + bool? 
includeDownloadTickets = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return GetSecureFilesByIdsAsync(project, secureFileIds, includeDownloadTickets, actionFilter: null, userState: userState, cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get secure files + /// + /// Project ID + /// A list of secure file Ids + /// If includeDownloadTickets is true and the caller has permissions, a download ticket for each secure file is included in the response. + /// + /// The cancellation token to cancel operation. + public virtual Task> GetSecureFilesByIdsAsync( + Guid project, + IEnumerable secureFileIds, + bool? includeDownloadTickets = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return GetSecureFilesByIdsAsync(project, secureFileIds, includeDownloadTickets, actionFilter: null, userState: userState, cancellationToken: cancellationToken); + } + + public async Task GetTaskContentZipAsync( + Guid taskId, + TaskVersion version, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + var routeValues = new { taskId = taskId, versionString = version.ToString() }; + HttpRequestMessage requestMessage = await CreateRequestMessageAsync( + HttpMethod.Get, + TaskResourceIds.Tasks, + routeValues: routeValues, + version: m_currentApiVersion).ConfigureAwait(false); + + requestMessage.Headers.Accept.Clear(); + var header = new MediaTypeWithQualityHeaderValue("application/zip"); + header.Parameters.Add(new NameValueHeaderValue("api-version", m_currentApiVersion.ApiVersionString)); + header.Parameters.Add(new NameValueHeaderValue("res-version", "1")); + requestMessage.Headers.Accept.Add(header); + + HttpResponseMessage response = await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + + response.EnsureSuccessStatusCode(); + + if (response.StatusCode == HttpStatusCode.NoContent) + { + throw new Exception("no content"); + } + + if (!VssStringComparer.ContentType.Equals(response.Content.Headers.ContentType.MediaType, "application/zip")) + { + throw new Exception("bad content type"); + } + + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + return new GZipStream(await response.Content.ReadAsStreamAsync().ConfigureAwait(false), CompressionMode.Decompress); + } + + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + + public Task QueueAgentRequestByPoolAsync( + Int32 poolId, + IList demands, + Guid serviceOwner, + Guid hostId, + Guid scopeIdentifier, + String hubName, + Guid planId, + Guid jobId, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + var request = new TaskAgentJobRequest + { + ServiceOwner = serviceOwner, + HostId = hostId, + PlanType = hubName, + ScopeId = scopeIdentifier, + PlanId = planId, + JobId = jobId, + Demands = demands, + }; + + return QueueAgentRequestByPoolAsync(poolId, request, userState, cancellationToken); + } + + public Task RenewAgentRequestAsync( + Int32 poolId, + Int64 requestId, + Guid lockToken, + DateTime? 
expiresOn = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + var request = new TaskAgentJobRequest + { + RequestId = requestId, + LockedUntil = expiresOn, + }; + + return UpdateAgentRequestAsync(poolId, requestId, lockToken, request, userState, cancellationToken); + } + + public Task ReplaceAgentAsync( + Int32 poolId, + TaskAgent agent, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckForNull(agent, "agent"); + return ReplaceAgentAsync(poolId, agent.Id, agent, userState, cancellationToken); + } + + public Task SendMessageAsync( + Int32 poolId, + Int64 requestId, + AgentJobRequestMessage request, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + var message = new TaskAgentMessage + { + Body = JsonUtility.ToString(request), + MessageType = request.MessageType, + }; + + return SendMessageAsync(poolId, requestId, message, userState: userState, cancellationToken: cancellationToken); + } + + public Task SendMessageAsync( + Int32 poolId, + Int64 requestId, + JobCancelMessage cancel, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + var message = new TaskAgentMessage + { + Body = JsonUtility.ToString(cancel), + MessageType = JobCancelMessage.MessageType, + }; + + return SendMessageAsync(poolId, requestId, message, userState: userState, cancellationToken: cancellationToken); + } + + public async Task UploadTaskZipAsync( + Guid taskId, + Stream fileStream, + Boolean overwrite = false, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckForNull(fileStream, "fileStream"); + + HttpRequestMessage requestMessage; + + if (fileStream.Length == 0) + { + throw new Exception("file stream of length 0 not allowed."); + } + + if (fileStream.Length > 16 * 1024 * 1024) + { + throw new Exception("file stream too big"); + } + + Byte[] dataToSend = new Byte[fileStream.Length]; + + List> queryParameters = null; + if (overwrite) + { + queryParameters = new List>(); + queryParameters.Add("overwrite", "true"); + } + + var routeValues = new + { + taskId = taskId + }; + + requestMessage = await CreateRequestMessageAsync(HttpMethod.Put, + TaskResourceIds.Tasks, + routeValues: routeValues, + version: m_currentApiVersion, + queryParameters: queryParameters, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false); + + // inorder for the upload to be retryable, we need the content to be re-readable + // to ensure this we copy the chunk into a byte array and send that + // chunk size ensures we can convert the length to an int + int bytesToCopy = (int)fileStream.Length; + using (MemoryStream ms = new MemoryStream(dataToSend)) + { + await fileStream.CopyToAsync(ms, bytesToCopy, cancellationToken).ConfigureAwait(false); + } + + // set the content and the Content-Range header + HttpContent byteArrayContent = new ByteArrayContent(dataToSend, 0, bytesToCopy); + byteArrayContent.Headers.ContentLength = fileStream.Length; + byteArrayContent.Headers.ContentRange = new ContentRangeHeaderValue(0, fileStream.Length - 1, fileStream.Length); + byteArrayContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); + + requestMessage.Content = byteArrayContent; + return await SendAsync(requestMessage, userState, cancellationToken).ConfigureAwait(false); + } + + /// + /// [Preview API] + /// + 
/// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDeploymentGroupsMetricsAsync2( + string project, + string deploymentGroupName = null, + string continuationToken = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("281c6308-427a-49e1-b83a-dac0f4862189"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (!string.IsNullOrEmpty(deploymentGroupName)) + { + queryParams.Add("deploymentGroupName", deploymentGroupName); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("4.0-preview.1"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetDeploymentGroupsMetricsAsync2( + Guid project, + string deploymentGroupName = null, + string continuationToken = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("281c6308-427a-49e1-b83a-dac0f4862189"); + object routeValues = new { project = project }; + + List> queryParams = new List>(); + if (!string.IsNullOrEmpty(deploymentGroupName)) + { + queryParams.Add("deploymentGroupName", deploymentGroupName); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("4.0-preview.1"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + /// + public virtual Task> GetDeploymentTargetsAsyncWithContinuationToken( + string project, + int deploymentGroupId, + IEnumerable tags = null, + string name = null, + bool? partialNameMatch = null, + DeploymentTargetExpands? expand = null, + TaskAgentStatusFilter? agentStatus = null, + TaskAgentJobResultFilter? agentJobResult = null, + string continuationToken = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken), + Boolean? 
enabled = null, + IEnumerable propertyFilters = null) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + if (tags != null && tags.Any()) + { + queryParams.Add("tags", string.Join(",", tags)); + } + if (!string.IsNullOrEmpty(name)) + { + queryParams.Add("name", name); + } + if (partialNameMatch != null) + { + queryParams.Add("partialNameMatch", partialNameMatch.Value.ToString()); + } + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + if (agentStatus != null) + { + queryParams.Add("agentStatus", agentStatus.Value.ToString()); + } + if (agentJobResult != null) + { + queryParams.Add("agentJobResult", agentJobResult.Value.ToString()); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (enabled != null) + { + queryParams.Add("enabled", enabled.Value.ToString()); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("4.1-preview.1"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// The cancellation token to cancel operation. + /// + public virtual Task> GetDeploymentTargetsAsyncWithContinuationToken( + Guid project, + int deploymentGroupId, + IEnumerable tags = null, + string name = null, + bool? partialNameMatch = null, + DeploymentTargetExpands? expand = null, + TaskAgentStatusFilter? agentStatus = null, + TaskAgentJobResultFilter? agentJobResult = null, + string continuationToken = null, + int? top = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken), + Boolean? 
enabled = null, + IEnumerable propertyFilters = null) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("2f0aa599-c121-4256-a5fd-ba370e0ae7b6"); + object routeValues = new { project = project, deploymentGroupId = deploymentGroupId }; + + List> queryParams = new List>(); + if (tags != null && tags.Any()) + { + queryParams.Add("tags", string.Join(",", tags)); + } + if (!string.IsNullOrEmpty(name)) + { + queryParams.Add("name", name); + } + if (partialNameMatch != null) + { + queryParams.Add("partialNameMatch", partialNameMatch.Value.ToString()); + } + if (expand != null) + { + queryParams.Add("$expand", expand.Value.ToString()); + } + if (agentStatus != null) + { + queryParams.Add("agentStatus", agentStatus.Value.ToString()); + } + if (agentJobResult != null) + { + queryParams.Add("agentJobResult", agentJobResult.Value.ToString()); + } + if (!string.IsNullOrEmpty(continuationToken)) + { + queryParams.Add("continuationToken", continuationToken); + } + if (top != null) + { + queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture)); + } + if (enabled != null) + { + queryParams.Add("enabled", enabled.Value.ToString()); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("4.1-preview.1"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken, + processResponse: GetPagedList); + } + + protected async Task> GetPagedList(HttpResponseMessage responseMessage, CancellationToken cancellationToken) + { + var continuationToken = GetContinuationToken(responseMessage); + var list = await ReadContentAsAsync>(responseMessage, cancellationToken).ConfigureAwait(false); + return new PagedList(list, continuationToken); + } + + protected string GetContinuationToken(HttpResponseMessage responseMessage) + { + string continuationToken = null; + + IEnumerable headerValues = null; + if (responseMessage.Headers.TryGetValues("x-ms-continuationtoken", out headerValues)) + { + continuationToken = headerValues.FirstOrDefault(); + } + + return continuationToken; + } + + protected Task SendAsync( + HttpMethod method, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + HttpContent content = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken), + Func> processResponse = null) + { + return SendAsync(method, null, locationId, routeValues, version, content, queryParameters, userState, cancellationToken, processResponse); + } + + protected async Task SendAsync( + HttpMethod method, + IEnumerable> additionalHeaders, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + HttpContent content = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken), + Func> processResponse = null) + { + using (VssTraceActivity.GetOrCreate().EnterCorrelationScope()) + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync(method, additionalHeaders, locationId, routeValues, version, content, queryParameters, userState, cancellationToken).ConfigureAwait(false)) + { + return await SendAsync(requestMessage, userState, cancellationToken, processResponse).ConfigureAwait(false); + } + } + + protected async Task SendAsync( 
+ HttpRequestMessage message, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken), + Func> processResponse = null) + { + if (processResponse == null) + { + processResponse = ReadContentAsAsync; + } + + //ConfigureAwait(false) enables the continuation to be run outside + //any captured SyncronizationContext (such as ASP.NET's) which keeps things + //from deadlocking... + using (HttpResponseMessage response = await this.SendAsync(message, userState, cancellationToken).ConfigureAwait(false)) + { + return await processResponse(response, cancellationToken).ConfigureAwait(false); + } + } + + private readonly ApiResourceVersion m_currentApiVersion = new ApiResourceVersion(3.0, 1); + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentHttpClientCompatBase.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentHttpClientCompatBase.cs new file mode 100644 index 00000000000..5e5c5074203 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentHttpClientCompatBase.cs @@ -0,0 +1,372 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + [ResourceArea(TaskResourceIds.AreaId)] + public abstract class TaskAgentHttpClientCompatBase : VssHttpClientBase + { + public TaskAgentHttpClientCompatBase( + Uri baseUrl, + VssCredentials credentials) + : base(baseUrl, credentials) + { + } + + public TaskAgentHttpClientCompatBase( + Uri baseUrl, + VssCredentials credentials, + VssHttpRequestSettings settings) + : base(baseUrl, credentials, settings) + { + } + + public TaskAgentHttpClientCompatBase( + Uri baseUrl, + VssCredentials credentials, + params DelegatingHandler[] handlers) + : base(baseUrl, credentials, handlers) + { + } + + public TaskAgentHttpClientCompatBase( + Uri baseUrl, + VssCredentials credentials, + VssHttpRequestSettings settings, + params DelegatingHandler[] handlers) + : base(baseUrl, credentials, settings, handlers) + { + } + + public TaskAgentHttpClientCompatBase( + Uri baseUrl, + HttpMessageHandler pipeline, + Boolean disposeHandler) + : base(baseUrl, pipeline, disposeHandler) + { + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// The cancellation token to cancel operation. + public virtual async Task DeleteTaskGroupAsync( + string project, + Guid taskGroupId, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("4.0-preview.1"), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// The cancellation token to cancel operation. 
+ public virtual async Task DeleteTaskGroupAsync( + Guid project, + Guid taskGroupId, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("DELETE"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + using (HttpResponseMessage response = await SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("4.0-preview.1"), + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false)) + { + return; + } + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetTaskGroupsAsync( + string project, + Guid? taskGroupId = null, + bool? expanded = null, + Guid? taskIdFilter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + List> queryParams = new List>(); + if (expanded != null) + { + queryParams.Add("expanded", expanded.Value.ToString()); + } + if (taskIdFilter != null) + { + queryParams.Add("taskIdFilter", taskIdFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("4.0-preview.1"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetTaskGroupsAsync( + Guid project, + Guid? taskGroupId = null, + bool? expanded = null, + Guid? taskIdFilter = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + List> queryParams = new List>(); + if (expanded != null) + { + queryParams.Add("expanded", expanded.Value.ToString()); + } + if (taskIdFilter != null) + { + queryParams.Add("taskIdFilter", taskIdFilter.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("4.0-preview.1"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID or project name + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetTaskGroupsAsync( + string project, + Guid? taskGroupId = null, + bool? 
expanded = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + List> queryParams = new List>(); + if (expanded != null) + { + queryParams.Add("expanded", expanded.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("3.2-preview.1"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] + /// + /// Project ID + /// + /// + /// + /// The cancellation token to cancel operation. + public virtual Task> GetTaskGroupsAsync( + Guid project, + Guid? taskGroupId = null, + bool? expanded = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + object routeValues = new { project = project, taskGroupId = taskGroupId }; + + List> queryParams = new List>(); + if (expanded != null) + { + queryParams.Add("expanded", expanded.Value.ToString()); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion("3.2-preview.1"), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get information about an agent. + /// + /// The agent pool containing the agent + /// The agent ID to get information about + /// Whether to include the agent's capabilities in the response + /// Whether to include details about the agent's current work + /// Filter which custom properties will be returned + /// + /// The cancellation token to cancel operation. + [EditorBrowsable(EditorBrowsableState.Never), Obsolete] + public virtual Task GetAgentAsync( + int poolId, + int agentId, + bool? includeCapabilities, + bool? includeAssignedRequest, + IEnumerable propertyFilters, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("e298ef32-5878-4cab-993c-043836571f42"); + object routeValues = new { poolId = poolId, agentId = agentId }; + + List> queryParams = new List>(); + if (includeCapabilities != null) + { + queryParams.Add("includeCapabilities", includeCapabilities.Value.ToString()); + } + if (includeAssignedRequest != null) + { + queryParams.Add("includeAssignedRequest", includeAssignedRequest.Value.ToString()); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + + return SendAsync( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + + /// + /// [Preview API] Get a list of agents. + /// + /// The agent pool containing the agents + /// Filter on agent name + /// Whether to include the agents' capabilities in the response + /// Whether to include details about the agents' current work + /// Filter which custom properties will be returned + /// Filter by demands the agents can satisfy + /// + /// The cancellation token to cancel operation. 
+ [EditorBrowsable(EditorBrowsableState.Never), Obsolete] + public virtual Task> GetAgentsAsync( + int poolId, + string agentName, + bool? includeCapabilities, + bool? includeAssignedRequest, + IEnumerable propertyFilters, + IEnumerable demands, + object userState = null, + CancellationToken cancellationToken = default) + { + HttpMethod httpMethod = new HttpMethod("GET"); + Guid locationId = new Guid("e298ef32-5878-4cab-993c-043836571f42"); + object routeValues = new { poolId = poolId }; + + List> queryParams = new List>(); + if (agentName != null) + { + queryParams.Add("agentName", agentName); + } + if (includeCapabilities != null) + { + queryParams.Add("includeCapabilities", includeCapabilities.Value.ToString()); + } + if (includeAssignedRequest != null) + { + queryParams.Add("includeAssignedRequest", includeAssignedRequest.Value.ToString()); + } + if (propertyFilters != null && propertyFilters.Any()) + { + queryParams.Add("propertyFilters", string.Join(",", propertyFilters)); + } + if (demands != null && demands.Any()) + { + queryParams.Add("demands", string.Join(",", demands)); + } + + return SendAsync>( + httpMethod, + locationId, + routeValues: routeValues, + version: new ApiResourceVersion(5.1, 1), + queryParameters: queryParams, + userState: userState, + cancellationToken: cancellationToken); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentJob.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentJob.cs new file mode 100644 index 00000000000..1a2089846c8 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentJob.cs @@ -0,0 +1,103 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAgentJob + { + public TaskAgentJob( + Guid id, + String name, + String container, + IList steps, + IDictionary sidecarContainers, + IList variables) + { + this.Id = id; + this.Name = name; + this.Container = container; + + m_variables = new List(variables); + m_steps = new List(steps); + + if (sidecarContainers?.Count > 0) + { + m_sidecarContainers = new Dictionary(sidecarContainers, StringComparer.OrdinalIgnoreCase); + } + } + + [DataMember] + public Guid Id + { + get; + } + + [DataMember] + public String Name + { + get; + } + + [DataMember(EmitDefaultValue = false)] + public String Container + { + get; + } + + public IList Steps + { + get + { + if (m_steps == null) + { + m_steps = new List(); + } + return m_steps; + } + } + + public IDictionary SidecarContainers + { + get + { + if (m_sidecarContainers == null) + { + m_sidecarContainers = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_sidecarContainers; + } + } + + public IList Variables + { + get + { + if (m_variables == null) + { + m_variables = new List(); + } + return m_variables; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_sidecarContainers?.Count == 0) + { + m_sidecarContainers = null; + } + } + + [DataMember(Name = "Steps", EmitDefaultValue = false)] + private List m_steps; + + [DataMember(Name = "SidecarContainers", EmitDefaultValue = false)] + private IDictionary m_sidecarContainers; + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private List m_variables; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentJobRequest.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentJobRequest.cs new file mode 100644 index 00000000000..78d0ba3c6f9 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentJobRequest.cs @@ -0,0 +1,433 @@ +using System; +using 
System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// A job request for an agent. + /// + [DataContract] + public class TaskAgentJobRequest : ICloneable + { + public TaskAgentJobRequest() + { + } + + private TaskAgentJobRequest(TaskAgentJobRequest requestToBeCloned) + { + this.RequestId = requestToBeCloned.RequestId; + this.QueueTime = requestToBeCloned.QueueTime; + this.AssignTime = requestToBeCloned.AssignTime; + this.ReceiveTime = requestToBeCloned.ReceiveTime; + this.FinishTime = requestToBeCloned.FinishTime; + this.Result = requestToBeCloned.Result; + this.LockedUntil = requestToBeCloned.LockedUntil; + this.ServiceOwner = requestToBeCloned.ServiceOwner; + this.HostId = requestToBeCloned.HostId; + this.ScopeId = requestToBeCloned.ScopeId; + this.PlanType = requestToBeCloned.PlanType; + this.PlanGroup = requestToBeCloned.PlanGroup; + this.PlanId = requestToBeCloned.PlanId; + this.QueueId = requestToBeCloned.QueueId; + this.PoolId = requestToBeCloned.PoolId; + this.JobId = requestToBeCloned.JobId; + this.JobName = requestToBeCloned.JobName; + this.Demands = new List(requestToBeCloned.Demands ?? new Demand[0]); + this.LockToken = requestToBeCloned.LockToken; + this.ExpectedDuration = requestToBeCloned.ExpectedDuration; + this.OrchestrationId = requestToBeCloned.OrchestrationId; + this.MatchesAllAgentsInPool = requestToBeCloned.MatchesAllAgentsInPool; + + if (requestToBeCloned.m_matchedAgents != null && requestToBeCloned.m_matchedAgents.Count > 0) + { + m_matchedAgents = requestToBeCloned.m_matchedAgents.Select(x => x.Clone()).ToList(); + } + + if (requestToBeCloned.m_agentDelays?.Count > 0) + { + m_agentDelays = new List(requestToBeCloned.m_agentDelays); + } + + if (requestToBeCloned.ReservedAgent != null) + { + this.ReservedAgent = requestToBeCloned.ReservedAgent.Clone(); + } + + if (requestToBeCloned.m_requestAgentData?.Count > 0) + { + foreach (var pair in requestToBeCloned.m_requestAgentData) + { + this.Data[pair.Key] = pair.Value; + } + } + + if (requestToBeCloned.AgentSpecification != null) + { + this.AgentSpecification = new JObject(requestToBeCloned.AgentSpecification); + } + } + + /// + /// ID of the request. + /// + /// + [DataMember(Order = 2)] + public Int64 RequestId + { + get; + internal set; + } + + /// + /// The date/time this request was queued. + /// + /// + [DataMember(Order = 3, EmitDefaultValue = false)] + public DateTime QueueTime + { + get; + internal set; + } + + /// + /// The date/time this request was assigned. + /// + /// + [DataMember(Order = 4, EmitDefaultValue = false)] + public DateTime? AssignTime + { + get; + internal set; + } + + /// + /// The date/time this request was receieved by an agent. + /// + /// + [DataMember(Order = 5, EmitDefaultValue = false)] + public DateTime? ReceiveTime + { + get; + internal set; + } + + /// + /// The date/time this request was finished. + /// + /// + [DataMember(Order = 6, EmitDefaultValue = false)] + public DateTime? FinishTime + { + get; + internal set; + } + + /// + /// The result of this request. + /// + /// + [DataMember(Order = 8, EmitDefaultValue = false)] + public TaskResult? Result + { + get; + set; + } + + /// + /// The deadline for the agent to renew the lock. + /// + /// + [DataMember(Order = 9, EmitDefaultValue = false)] + public DateTime? LockedUntil + { + get; + internal set; + } + + /// + /// The service which owns this request. 
+ /// + /// + [DataMember(Order = 10, EmitDefaultValue = false)] + public Guid ServiceOwner + { + get; + set; + } + + /// + /// The host which triggered this request. + /// + /// + [DataMember(Order = 11, EmitDefaultValue = false)] + public Guid HostId + { + get; + set; + } + + /// + /// Scope of the pipeline; matches the project ID. + /// + /// + [DataMember(Order = 12, EmitDefaultValue = false)] + public Guid ScopeId + { + get; + set; + } + + /// + /// Internal detail representing the type of orchestration plan. + /// + /// + [DataMember(Order = 13, EmitDefaultValue = false)] + public String PlanType + { + get; + set; + } + + /// + /// Internal ID for the orchestration plan connected with this request. + /// + /// + [DataMember(Order = 14, EmitDefaultValue = false)] + public Guid PlanId + { + get; + set; + } + + /// + /// ID of the job resulting from this request. + /// + /// + [DataMember(Order = 15, EmitDefaultValue = false)] + public Guid JobId + { + get; + set; + } + + /// + /// Name of the job resulting from this request. + /// + /// + [DataMember(Order = 21, EmitDefaultValue = false)] + public String JobName + { + get; + set; + } + + /// + /// A list of demands required to fulfill this request. + /// + /// + [DataMember(Order = 16, EmitDefaultValue = false)] + public IList Demands + { + get; + set; + } + + /// + /// The agent allocated for this request. + /// + /// + [DataMember(Order = 17, EmitDefaultValue = false)] + public TaskAgentReference ReservedAgent + { + get; + internal set; + } + + public List MatchedAgents + { + get + { + if (m_matchedAgents == null) + { + m_matchedAgents = new List(); + } + return m_matchedAgents; + } + } + + /// + /// The pipeline definition associated with this request + /// + /// + [DataMember(Order = 19, EmitDefaultValue = false)] + public TaskOrchestrationOwner Definition + { + get; + set; + } + + /// + /// The pipeline associated with this request + /// + /// + [DataMember(Order = 20, EmitDefaultValue = false)] + public TaskOrchestrationOwner Owner + { + get; + set; + } + + /// + /// Additional data about the request. + /// + /// + [DataMember(Order = 22, EmitDefaultValue = false)] + public IDictionary Data + { + get + { + if (m_requestAgentData == null) + { + m_requestAgentData = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_requestAgentData; + } + } + + [DataMember(Order = 23, EmitDefaultValue = false)] + public String PlanGroup + { + get; + set; + } + + /// + /// The ID of the pool this request targets + /// + /// + [DataMember(Order = 24, EmitDefaultValue = false)] + internal Int32 PoolId + { + get; + set; + } + + /// + /// The ID of the queue this request targets + /// + /// + [DataMember(Order = 25, EmitDefaultValue = false)] + internal Int32? QueueId + { + get; + set; + } + + [DataMember(Order = 26, EmitDefaultValue = false)] + public List AgentDelays + { + get + { + if (m_agentDelays == null) + { + m_agentDelays = new List(); + } + return m_agentDelays; + } + internal set + { + m_agentDelays = value; + } + } + + [DataMember(Order = 27, EmitDefaultValue = false)] + public TimeSpan? 
ExpectedDuration + { + get; + set; + } + + [DataMember(Order = 28, EmitDefaultValue = false)] + public JObject AgentSpecification + { + get; + set; + } + + [DataMember(Order = 29, EmitDefaultValue = false)] + public String OrchestrationId + { + get; + set; + } + + [DataMember(Order = 30, EmitDefaultValue = false)] + public Boolean MatchesAllAgentsInPool + { + get; + set; + } + + [DataMember(Order = 31, EmitDefaultValue = false)] + public String StatusMessage + { + get; + set; + } + + [DataMember(Order = 32, EmitDefaultValue = false)] + public bool UserDelayed + { + get; + set; + } + + [IgnoreDataMember] + internal Guid? LockToken + { + get; + set; + } + + Object ICloneable.Clone() + { + return this.Clone(); + } + + public TaskAgentJobRequest Clone() + { + return new TaskAgentJobRequest(this); + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedMatchedAgents, ref m_matchedAgents, true); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedMatchedAgents = null; + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_matchedAgents, ref m_serializedMatchedAgents); + } + + private List m_matchedAgents; + + private List m_agentDelays; + + private IDictionary m_requestAgentData; + + [DataMember(Name = "MatchedAgents", Order = 18, EmitDefaultValue = false)] + private List m_serializedMatchedAgents; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentJobResultFilter.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentJobResultFilter.cs new file mode 100644 index 00000000000..eed282b4f28 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentJobResultFilter.cs @@ -0,0 +1,37 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// This is useful in getting a list of deployment targets, filtered by the result of their last job. + /// + [Flags] + [DataContract] + public enum TaskAgentJobResultFilter + { + /// + /// Only those deployment targets on which last job failed (**Abandoned**, **Canceled**, **Failed**, **Skipped**). + /// + [EnumMember] + Failed = 1, + + /// + /// Only those deployment targets on which last job Passed (**Succeeded**, **Succeeded with issues**). + /// + [EnumMember] + Passed = 2, + + /// + /// Only those deployment targets that never executed a job. + /// + [EnumMember] + NeverDeployed = 4, + + /// + /// All deployment targets. 
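// Editor's note: illustrative sketch, not part of this patch. It only reads
// members of TaskAgentJobRequest declared above; a real instance would come back
// from the service, since most of the timestamp setters are internal to the SDK.
using System;
using GitHub.DistributedTask.WebApi;

static class JobRequestSample
{
    static TaskAgentJobRequest Describe(TaskAgentJobRequest request)
    {
        // QueueTime is always populated; the other timestamps stay null until the
        // corresponding transition has happened.
        TimeSpan? waitTime = request.AssignTime - request.QueueTime;
        TimeSpan? runTime = request.FinishTime - request.ReceiveTime;

        Console.WriteLine($"Request {request.RequestId} for job '{request.JobName}'");
        Console.WriteLine($"  queued: {request.QueueTime:o}");
        Console.WriteLine($"  waited: {(waitTime?.TotalSeconds ?? 0):F0}s");
        Console.WriteLine($"  ran:    {(runTime?.TotalSeconds ?? 0):F0}s");
        Console.WriteLine($"  result: {request.Result?.ToString() ?? "(still running)"}");

        // Clone() uses the private copy constructor above, which deep-copies the
        // matched agents, agent delays, request data, and agent specification.
        return request.Clone();
    }
}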
+ /// + [EnumMember] + All = Failed | Passed | NeverDeployed + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentJobStep.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentJobStep.cs new file mode 100644 index 00000000000..d585ce90c9e --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentJobStep.cs @@ -0,0 +1,90 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAgentJobStep + { + [DataContract] + public enum TaskAgentJobStepType + { + [DataMember] + Task = 1, + + [DataMember] + Action = 2 + } + + [DataMember(EmitDefaultValue = false)] + public TaskAgentJobStepType Type + { + get; + set; + } + + [DataMember] + public Guid Id + { + get; + set; + } + + [DataMember] + public String Name + { + get; + set; + } + + [DataMember] + public Boolean Enabled + { + get; + set; + } + + [DataMember] + public String Condition + { + get; + set; + } + + [DataMember] + public Boolean ContinueOnError + { + get; + set; + } + + [DataMember] + public Int32 TimeoutInMinutes + { + get; + set; + } + + [DataMember] + public TaskAgentJobTask Task + { + get; + set; + } + + [DataMember] + public IDictionary Env + { + get; + set; + } + + [DataMember] + public IDictionary Inputs + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentJobTask.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentJobTask.cs new file mode 100644 index 00000000000..9266101e06a --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentJobTask.cs @@ -0,0 +1,30 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAgentJobTask + { + [DataMember] + public Guid Id + { + get; + set; + } + + [DataMember] + public String Name + { + get; + set; + } + + [DataMember] + public String Version + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentJobVariable.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentJobVariable.cs new file mode 100644 index 00000000000..54d1eb8e1f8 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentJobVariable.cs @@ -0,0 +1,30 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAgentJobVariable + { + [DataMember] + public String Name + { + get; + set; + } + + [DataMember] + public String Value + { + get; + set; + } + + [DataMember] + public Boolean Secret + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentMessage.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentMessage.cs new file mode 100644 index 00000000000..b23ebddc8fa --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentMessage.cs @@ -0,0 +1,60 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Provides a contract for receiving messages from the task orchestrator. + /// + [DataContract] + public sealed class TaskAgentMessage + { + /// + /// Initializes an empty TaskAgentMessage instance. + /// + public TaskAgentMessage() + { + } + + /// + /// Gets or sets the message identifier. + /// + [DataMember(EmitDefaultValue = false)] + public Int64 MessageId + { + get; + set; + } + + /// + /// Gets or sets the message type, describing the data contract found in TaskAgentMessage.Body. + /// + [DataMember] + public String MessageType + { + get; + set; + } + + /// + /// Gets or sets the intialization vector used to encrypt this message. 
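// Editor's note: illustrative sketch, not part of this patch. The Env and Inputs
// properties of TaskAgentJobStep lose their generic arguments in this rendering;
// the sketch assumes both are IDictionary<String, String>. The step and task
// values below are invented purely for illustration.
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.WebApi;

static class JobStepSample
{
    static TaskAgentJobStep CreateCheckoutStep()
    {
        return new TaskAgentJobStep
        {
            Type = TaskAgentJobStep.TaskAgentJobStepType.Action,
            Id = Guid.NewGuid(),
            Name = "checkout",
            Enabled = true,
            Condition = "success()",
            ContinueOnError = false,
            TimeoutInMinutes = 10,
            Task = new TaskAgentJobTask
            {
                Id = Guid.NewGuid(),
                Name = "actions/checkout",
                Version = "2.0.0",
            },
            Env = new Dictionary<String, String> { ["NO_COLOR"] = "1" },
            Inputs = new Dictionary<String, String> { ["fetch-depth"] = "1" },
        };
    }
}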
+ /// + [DataMember(EmitDefaultValue = false)] + public Byte[] IV + { + get; + set; + } + + /// + /// Gets or sets the body of the message. If the IV property is provided the body will need to be + /// decrypted using the TaskAgentSession.EncryptionKey value in addition to the IV. + /// + [DataMember] + public String Body + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPool.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPool.cs new file mode 100644 index 00000000000..273b5b92efa --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPool.cs @@ -0,0 +1,201 @@ +using GitHub.Services.WebApi; +using System; +using System.Runtime.Serialization; +using System.ComponentModel; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// An organization-level grouping of agents. + /// + [DataContract] + public class TaskAgentPool : TaskAgentPoolReference + { + internal TaskAgentPool() + { + } + + public TaskAgentPool(String name) + { + this.Name = name; + } + + private TaskAgentPool(TaskAgentPool poolToBeCloned) + { + this.AutoProvision = poolToBeCloned.AutoProvision; + this.CreatedBy = poolToBeCloned.CreatedBy?.Clone(); + this.CreatedOn = poolToBeCloned.CreatedOn; + this.Id = poolToBeCloned.Id; + this.IsHosted = poolToBeCloned.IsHosted; + this.Name = poolToBeCloned.Name; + this.Scope = poolToBeCloned.Scope; + this.Size = poolToBeCloned.Size; + this.PoolType = poolToBeCloned.PoolType; + this.Owner = poolToBeCloned.Owner?.Clone(); + this.AgentCloudId = poolToBeCloned.AgentCloudId; + this.TargetSize = poolToBeCloned.TargetSize; + this.IsLegacy = poolToBeCloned.IsLegacy; + +#pragma warning disable 0618 + this.AdministratorsGroup = poolToBeCloned.AdministratorsGroup?.Clone(); + this.GroupScopeId = poolToBeCloned.GroupScopeId; + this.Provisioned = poolToBeCloned.Provisioned; + this.ServiceAccountsGroup = poolToBeCloned.ServiceAccountsGroup?.Clone(); +#pragma warning restore 0618 + + if (poolToBeCloned.m_properties != null) + { + m_properties = new PropertiesCollection(poolToBeCloned.m_properties); + } + } + + /// + /// The date/time of the pool creation. + /// + [DataMember] + public DateTime CreatedOn + { + get; + internal set; + } + + /// + /// Whether or not a queue should be automatically provisioned for + /// each project collection. + /// + [DataMember] + public Boolean? AutoProvision + { + get; + set; + } + + /// + /// Whether or not the pool should autosize itself based on the + /// Agent Cloud Provider settings. + /// + [DataMember] + public Boolean? AutoSize + { + get; + set; + } + + /// + /// Target parallelism. + /// + [DataMember] + public Int32? TargetSize + { + get; + set; + } + + /// + /// The ID of the associated agent cloud. + /// + [DataMember] + public Int32? AgentCloudId + { + get; + set; + } + + /// + /// Creator of the pool. The creator of the pool is automatically added into the + /// administrators group for the pool on creation. + /// + [DataMember] + public IdentityRef CreatedBy + { + get; + set; + } + + /// + /// Owner or administrator of the pool. + /// + [DataMember] + public IdentityRef Owner + { + get; + set; + } + + /// + /// Properties which may be used to extend the storage fields available + /// for a given machine instance. 
+ /// + public PropertiesCollection Properties + { + get + { + if (m_properties == null) + { + m_properties = new PropertiesCollection(); + } + return m_properties; + } + internal set + { + m_properties = value; + } + } + + #region Obsolete Properties + + /// + /// Gets the scope identifier for groups/roles which are owned by this pool. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [Obsolete("This property is no longer used and will be removed in a future version.", false)] + public Guid GroupScopeId + { + get; + internal set; + } + + /// + /// Gets a value indicating whether or not roles have been provisioned for this pool. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [Obsolete("This property is no longer used and will be removed in a future version.", false)] + public Boolean Provisioned + { + get; + internal set; + } + + /// + /// Gets the administrators group for this agent pool. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [Obsolete("This property is no longer used and will be removed in a future version.", false)] + public IdentityRef AdministratorsGroup + { + get; + internal set; + } + + /// + /// Gets the service accounts group for this agent pool. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + [Obsolete("This property is no longer used and will be removed in a future version.", false)] + public IdentityRef ServiceAccountsGroup + { + get; + internal set; + } + + #endregion + + public new TaskAgentPool Clone() + { + return new TaskAgentPool(this); + } + + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "Properties")] + private PropertiesCollection m_properties; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPoolActionFilter.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolActionFilter.cs new file mode 100644 index 00000000000..83c5472e902 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolActionFilter.cs @@ -0,0 +1,22 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Filters pools based on whether the calling user has permission to use or manage the pool. 
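// Editor's note: illustrative sketch, not part of this patch. It exercises only
// members declared above; the identity fields (CreatedBy/Owner), CreatedOn and
// Properties are left alone because they are normally filled in by the service.
using GitHub.DistributedTask.WebApi;

static class AgentPoolSample
{
    static TaskAgentPool CreateSelfHostedPool()
    {
        var pool = new TaskAgentPool("Default")
        {
            AutoProvision = true,    // provision a queue per project collection
            AutoSize = false,        // no agent-cloud autosizing
            TargetSize = null,       // only meaningful when an agent cloud is attached
            AgentCloudId = null,
        };

        // Members inherited from TaskAgentPoolReference.
        pool.PoolType = TaskAgentPoolType.Automation;
        pool.IsHosted = false;

        return pool;
    }
}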
+ /// + [Flags] + [DataContract] + public enum TaskAgentPoolActionFilter + { + [EnumMember] + None = 0, + + [EnumMember] + Manage = 2, + + [EnumMember] + Use = 16, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceDefinition.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceDefinition.cs new file mode 100644 index 00000000000..9a4e5e59510 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceDefinition.cs @@ -0,0 +1,168 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAgentPoolMaintenanceDefinition + { + internal TaskAgentPoolMaintenanceDefinition() + { + } + + private TaskAgentPoolMaintenanceDefinition(TaskAgentPoolMaintenanceDefinition maintenanceDefinitionToBeCloned) + { + this.Enabled = maintenanceDefinitionToBeCloned.Enabled; + this.JobTimeoutInMinutes = maintenanceDefinitionToBeCloned.JobTimeoutInMinutes; + this.MaxConcurrentAgentsPercentage = maintenanceDefinitionToBeCloned.MaxConcurrentAgentsPercentage; + + if (maintenanceDefinitionToBeCloned.Pool != null) + { + this.Pool = new TaskAgentPoolReference + { + Id = maintenanceDefinitionToBeCloned.Pool.Id, + Name = maintenanceDefinitionToBeCloned.Pool.Name, + Scope = maintenanceDefinitionToBeCloned.Pool.Scope, + PoolType = maintenanceDefinitionToBeCloned.Pool.PoolType + }; + } + + this.m_options = maintenanceDefinitionToBeCloned.Options.Clone(); + this.m_retentionPolicy = maintenanceDefinitionToBeCloned.RetentionPolicy.Clone(); + this.m_scheduleSetting = maintenanceDefinitionToBeCloned.ScheduleSetting.Clone(); + } + + /// + /// Id + /// + [DataMember] + public Int32 Id + { + get; + internal set; + } + + /// + /// Pool reference for the maintenance definition + /// + [DataMember(EmitDefaultValue = false)] + public TaskAgentPoolReference Pool + { + get; + set; + } + + /// + /// Enable maintenance + /// + [DataMember] + public Boolean Enabled + { + get; + set; + } + + /// + /// Maintenance job timeout per agent + /// + [DataMember] + public Int32 JobTimeoutInMinutes + { + get; + set; + } + + /// + /// Max percentage of agents within a pool running maintenance job at given time + /// + [DataMember] + public Int32 MaxConcurrentAgentsPercentage + { + get; + set; + } + + /// + /// Maintenance option for the definition + /// + public TaskAgentPoolMaintenanceOptions Options + { + get + { + if (m_options == null) + { + m_options = new TaskAgentPoolMaintenanceOptions() + { + WorkingDirectoryExpirationInDays = 0, + }; + } + + return m_options; + } + internal set + { + m_options = value; + } + } + + /// + /// The retention setting for the pool maintenance definition. + /// + public TaskAgentPoolMaintenanceRetentionPolicy RetentionPolicy + { + get + { + if (m_retentionPolicy == null) + { + m_retentionPolicy = new TaskAgentPoolMaintenanceRetentionPolicy() + { + NumberOfHistoryRecordsToKeep = 1, + }; + } + + return m_retentionPolicy; + } + internal set + { + m_retentionPolicy = value; + } + } + + /// + /// The schedule setting for the pool maintenance job. 
+ /// + public TaskAgentPoolMaintenanceSchedule ScheduleSetting + { + get + { + if (m_scheduleSetting == null) + { + m_scheduleSetting = new TaskAgentPoolMaintenanceSchedule() + { + DaysToBuild = TaskAgentPoolMaintenanceScheduleDays.None, + }; + } + + return m_scheduleSetting; + } + internal set + { + m_scheduleSetting = value; + } + } + + public TaskAgentPoolMaintenanceDefinition Clone() + { + return new TaskAgentPoolMaintenanceDefinition(this); + } + + [DataMember(EmitDefaultValue = false, Name = "Options")] + public TaskAgentPoolMaintenanceOptions m_options; + + [DataMember(EmitDefaultValue = false, Name = "RetentionPolicy")] + private TaskAgentPoolMaintenanceRetentionPolicy m_retentionPolicy; + + [DataMember(EmitDefaultValue = false, Name = "ScheduleSetting")] + private TaskAgentPoolMaintenanceSchedule m_scheduleSetting; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJob.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJob.cs new file mode 100644 index 00000000000..4d1f5647f78 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJob.cs @@ -0,0 +1,169 @@ +using GitHub.Services.WebApi; +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAgentPoolMaintenanceJob + { + internal TaskAgentPoolMaintenanceJob() + { + } + + /// + /// Id of the maintenance job + /// + [DataMember] + public Int32 JobId + { + get; + internal set; + } + + /// + /// Pool reference for the maintenance job + /// + [DataMember(EmitDefaultValue = false)] + public TaskAgentPoolReference Pool + { + get; + set; + } + + /// + /// Orchestration/Plan Id for the maintenance job + /// + [DataMember] + public Guid OrchestrationId + { + get; + internal set; + } + + /// + /// The maintenance definition for the maintenance job + /// + [DataMember] + public Int32 DefinitionId + { + get; + set; + } + + /// + /// Status of the maintenance job + /// + [DataMember] + public TaskAgentPoolMaintenanceJobStatus Status + { + get; + set; + } + + /// + /// The maintenance job result + /// + [DataMember(EmitDefaultValue = false)] + public TaskAgentPoolMaintenanceJobResult? Result + { + get; + internal set; + } + + /// + /// Time that the maintenance job was queued + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? QueueTime + { + get; + internal set; + } + + /// + /// Time that the maintenance job was started + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? StartTime + { + get; + internal set; + } + + /// + /// Time that the maintenance job was completed + /// + [DataMember(EmitDefaultValue = false)] + public DateTime? 
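// Editor's note: illustrative sketch, not part of this patch. The definition's
// constructor is internal, so an instance would come from the service; the sketch
// only shows how the lazily created Options/RetentionPolicy/ScheduleSetting
// defaults in the property getters above behave. The concrete values are
// illustrative, not recommendations.
using GitHub.DistributedTask.WebApi;

static class MaintenanceDefinitionSample
{
    static void ConfigureWeeklyMaintenance(TaskAgentPoolMaintenanceDefinition definition)
    {
        definition.Enabled = true;
        definition.JobTimeoutInMinutes = 60;
        definition.MaxConcurrentAgentsPercentage = 25;

        // First access materializes the defaults declared in the getters above:
        //   Options.WorkingDirectoryExpirationInDays == 0
        //   RetentionPolicy.NumberOfHistoryRecordsToKeep == 1 (clamped to >= 1)
        //   ScheduleSetting.DaysToBuild == TaskAgentPoolMaintenanceScheduleDays.None
        definition.Options.WorkingDirectoryExpirationInDays = 30;
        definition.ScheduleSetting.DaysToBuild = TaskAgentPoolMaintenanceScheduleDays.Sunday;
        definition.ScheduleSetting.StartHours = 2;
        definition.ScheduleSetting.StartMinutes = 0;
        definition.ScheduleSetting.TimeZoneId = "UTC";
    }
}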
FinishTime + { + get; + internal set; + } + + /// + /// The identity that queued the maintenance job + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef RequestedBy + { + get; + internal set; + } + + /// + /// The total error counts during the maintenance job + /// + [DataMember] + public Int32 ErrorCount + { + get; + internal set; + } + + /// + /// The total warning counts during the maintenance job + /// + [DataMember] + public Int32 WarningCount + { + get; + internal set; + } + + /// + /// The log download url for the maintenance job + /// + [DataMember] + public String LogsDownloadUrl + { + get; + internal set; + } + + + /// + /// All agents that the maintenance job will run on + /// + public List TargetAgents + { + get + { + if (m_targetAgents == null) + { + m_targetAgents = new List(); + } + + return m_targetAgents; + } + internal set + { + m_targetAgents = value; + } + } + + [DataMember(EmitDefaultValue = false, Name = "TargetAgents")] + private List m_targetAgents; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJobResult.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJobResult.cs new file mode 100644 index 00000000000..525947ba0cf --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJobResult.cs @@ -0,0 +1,18 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public enum TaskAgentPoolMaintenanceJobResult + { + [EnumMember] + Succeeded = 1, + + [EnumMember] + Failed = 2, + + [EnumMember] + Canceled = 4, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJobStatus.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJobStatus.cs new file mode 100644 index 00000000000..a8c594f7b1d --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJobStatus.cs @@ -0,0 +1,21 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public enum TaskAgentPoolMaintenanceJobStatus + { + [EnumMember] + InProgress = 1, + + [EnumMember] + Completed = 2, + + [EnumMember] + Cancelling = 4, + + [EnumMember] + Queued = 8, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJobTargetAgent.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJobTargetAgent.cs new file mode 100644 index 00000000000..e200ef5ecb5 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceJobTargetAgent.cs @@ -0,0 +1,41 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAgentPoolMaintenanceJobTargetAgent + { + internal TaskAgentPoolMaintenanceJobTargetAgent() + { + } + + [DataMember] + public Int32 JobId + { + get; + set; + } + + [DataMember] + public TaskAgentReference Agent + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TaskAgentPoolMaintenanceJobStatus? Status + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TaskAgentPoolMaintenanceJobResult? 
Result + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceOptions.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceOptions.cs new file mode 100644 index 00000000000..332c32b3f37 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceOptions.cs @@ -0,0 +1,33 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class TaskAgentPoolMaintenanceOptions + { + internal TaskAgentPoolMaintenanceOptions() + { + } + + private TaskAgentPoolMaintenanceOptions(TaskAgentPoolMaintenanceOptions maintenanceOptionToBeCloned) + { + this.WorkingDirectoryExpirationInDays = maintenanceOptionToBeCloned.WorkingDirectoryExpirationInDays; + } + + /// + /// time to consider a System.DefaultWorkingDirectory is stale + /// + [DataMember] + public Int32 WorkingDirectoryExpirationInDays + { + get; + set; + } + + public TaskAgentPoolMaintenanceOptions Clone() + { + return new TaskAgentPoolMaintenanceOptions(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceRetentionPolicy.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceRetentionPolicy.cs new file mode 100644 index 00000000000..e5bb9d57a2b --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceRetentionPolicy.cs @@ -0,0 +1,47 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class TaskAgentPoolMaintenanceRetentionPolicy + { + internal TaskAgentPoolMaintenanceRetentionPolicy() + { } + + private TaskAgentPoolMaintenanceRetentionPolicy(TaskAgentPoolMaintenanceRetentionPolicy maintenanceRetentionPolicyToBeCloned) + { + this.NumberOfHistoryRecordsToKeep = maintenanceRetentionPolicyToBeCloned.NumberOfHistoryRecordsToKeep; + } + + /// + /// Number of records to keep for maintenance job executed with this definition. + /// + [DataMember] + public Int32 NumberOfHistoryRecordsToKeep + { + get + { + return m_numberOfHistoryRecordsToKeep; + } + internal set + { + if (value < 1) + { + m_numberOfHistoryRecordsToKeep = 1; + } + else + { + m_numberOfHistoryRecordsToKeep = value; + } + } + } + + public TaskAgentPoolMaintenanceRetentionPolicy Clone() + { + return new TaskAgentPoolMaintenanceRetentionPolicy(this); + } + + private Int32 m_numberOfHistoryRecordsToKeep; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceSchedule.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceSchedule.cs new file mode 100644 index 00000000000..a931287efae --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceSchedule.cs @@ -0,0 +1,58 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class TaskAgentPoolMaintenanceSchedule + { + internal TaskAgentPoolMaintenanceSchedule() + { + this.DaysToBuild = TaskAgentPoolMaintenanceScheduleDays.None; + } + + private TaskAgentPoolMaintenanceSchedule(TaskAgentPoolMaintenanceSchedule maintenanceScheduleToBeCloned) + { + this.ScheduleJobId = maintenanceScheduleToBeCloned.ScheduleJobId; + this.StartHours = maintenanceScheduleToBeCloned.StartHours; + this.StartMinutes = maintenanceScheduleToBeCloned.StartMinutes; + this.TimeZoneId = maintenanceScheduleToBeCloned.TimeZoneId; + this.DaysToBuild = maintenanceScheduleToBeCloned.DaysToBuild; + } + + /// + /// The Job Id of the Scheduled job that will queue the pool maintenance job. 
+ /// + [DataMember] + public Guid ScheduleJobId { get; set; } + + /// + /// Time zone of the build schedule (string representation of the time zone id) + /// + [DataMember] + public String TimeZoneId { get; set; } + + /// + /// Local timezone hour to start + /// + [DataMember] + public Int32 StartHours { get; set; } + + /// + /// Local timezone minute to start + /// + [DataMember] + public Int32 StartMinutes { get; set; } + + /// + /// Days for a build (flags enum for days of the week) + /// + [DataMember] + public TaskAgentPoolMaintenanceScheduleDays DaysToBuild { get; set; } + + public TaskAgentPoolMaintenanceSchedule Clone() + { + return new TaskAgentPoolMaintenanceSchedule(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceScheduleDays.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceScheduleDays.cs new file mode 100644 index 00000000000..e96d345724c --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolMaintenanceScheduleDays.cs @@ -0,0 +1,62 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public enum TaskAgentPoolMaintenanceScheduleDays + { + /// + /// Do not run. + /// + [EnumMember] + None = 0, + + /// + /// Run on Monday. + /// + [EnumMember] + Monday = 1, + + /// + /// Run on Tuesday. + /// + [EnumMember] + Tuesday = 2, + + /// + /// Run on Wednesday. + /// + [EnumMember] + Wednesday = 4, + + /// + /// Run on Thursday. + /// + [EnumMember] + Thursday = 8, + + /// + /// Run on Friday. + /// + [EnumMember] + Friday = 16, + + /// + /// Run on Saturday. + /// + [EnumMember] + Saturday = 32, + + /// + /// Run on Sunday. + /// + [EnumMember] + Sunday = 64, + + /// + /// Run on all days of the week. + /// + [EnumMember] + All = Monday | Tuesday | Wednesday | Thursday | Friday | Saturday | Sunday, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPoolReference.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolReference.cs new file mode 100644 index 00000000000..bbef3809f77 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolReference.cs @@ -0,0 +1,108 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAgentPoolReference + { + public TaskAgentPoolReference() + { + } + + public TaskAgentPoolReference( + Guid scope, + Int32 id, + TaskAgentPoolType poolType = TaskAgentPoolType.Automation) + { + this.Id = id; + this.Scope = scope; + this.PoolType = poolType; + } + + protected TaskAgentPoolReference(TaskAgentPoolReference referenceToBeCloned) + { + this.Id = referenceToBeCloned.Id; + this.Name = referenceToBeCloned.Name; + this.Scope = referenceToBeCloned.Scope; + this.IsHosted = referenceToBeCloned.IsHosted; + this.PoolType = referenceToBeCloned.PoolType; + this.Size = referenceToBeCloned.Size; + this.IsLegacy = referenceToBeCloned.IsLegacy; + } + + public TaskAgentPoolReference Clone() + { + return new TaskAgentPoolReference(this); + } + + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Guid Scope + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Gets or sets a value indicating whether or not this pool is managed by the service. 
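// Editor's note: illustrative sketch, not part of this patch. The day values above
// are powers of two, so individual days combine with bitwise OR in the same way
// the predefined All member does.
using GitHub.DistributedTask.WebApi;

static class ScheduleDaysSample
{
    static readonly TaskAgentPoolMaintenanceScheduleDays Weekend =
        TaskAgentPoolMaintenanceScheduleDays.Saturday | TaskAgentPoolMaintenanceScheduleDays.Sunday;

    // True when the schedule includes the given (single) day.
    static bool RunsOn(TaskAgentPoolMaintenanceScheduleDays schedule, TaskAgentPoolMaintenanceScheduleDays day)
        => day != TaskAgentPoolMaintenanceScheduleDays.None && (schedule & day) == day;
}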
+ /// + [DataMember] + public Boolean IsHosted + { + get; + set; + } + + /// + /// Gets or sets the type of the pool + /// + [DataMember] + public TaskAgentPoolType PoolType + { + get + { + return m_poolType; + } + set + { + m_poolType = value; + } + } + + /// + /// Gets the current size of the pool. + /// + [DataMember] + public Int32 Size + { + get; + set; + } + + /// + /// Determines whether the pool is legacy. + /// + [DataMember] + public Boolean? IsLegacy + { + get; + set; + } + + private TaskAgentPoolType m_poolType = TaskAgentPoolType.Automation; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPoolType.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolType.cs new file mode 100644 index 00000000000..d69fc2092f5 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPoolType.cs @@ -0,0 +1,23 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// The type of agent pool. + /// + [DataContract] + public enum TaskAgentPoolType + { + /// + /// A typical pool of task agents + /// + [EnumMember] + Automation = 1, + + /// + /// A deployment pool + /// + [EnumMember] + Deployment = 2 + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentProvisiongStateConstants.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentProvisiongStateConstants.cs new file mode 100644 index 00000000000..fd4c5ccd2c2 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentProvisiongStateConstants.cs @@ -0,0 +1,13 @@ +using System; + +namespace GitHub.DistributedTask.WebApi +{ + public class TaskAgentProvisioningStateConstants + { + public const String Deallocated = "Deallocated"; + public const String Provisioning = "Provisioning"; + public const String Provisioned = "Provisioned"; + public const String Deprovisioning = "Deprovisioning"; + public const String RunningRequest = "RunningRequest"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentPublicKey.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentPublicKey.cs new file mode 100644 index 00000000000..f17d4d6705d --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentPublicKey.cs @@ -0,0 +1,76 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Represents the public key portion of an RSA asymmetric key. + /// + [DataContract] + public sealed class TaskAgentPublicKey + { + /// + /// Initializes a new TaskAgentPublicKey instance with empty exponent and modulus values. + /// + public TaskAgentPublicKey() + { + } + + /// + /// Initializes a new TaskAgentPublicKey instance with the specified exponent and modulus values. + /// + /// The exponent value of the key + /// The modulus value of the key + public TaskAgentPublicKey( + Byte[] exponent, + Byte[] modulus) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(exponent, nameof(exponent)); + ArgumentUtility.CheckEnumerableForNullOrEmpty(modulus, nameof(modulus)); + + this.Exponent = exponent; + this.Modulus = modulus; + } + + private TaskAgentPublicKey(TaskAgentPublicKey objectToBeCloned) + { + if (objectToBeCloned.Exponent != null) + { + this.Exponent = new Byte[objectToBeCloned.Exponent.Length]; + Buffer.BlockCopy(objectToBeCloned.Exponent, 0, this.Exponent, 0, objectToBeCloned.Exponent.Length); + } + + if (objectToBeCloned.Modulus != null) + { + this.Modulus = new Byte[objectToBeCloned.Modulus.Length]; + Buffer.BlockCopy(objectToBeCloned.Modulus, 0, this.Modulus, 0, objectToBeCloned.Modulus.Length); + } + } + + /// + /// Gets or sets the exponent for the public key. 
+ /// + [DataMember(EmitDefaultValue = false)] + public Byte[] Exponent + { + get; + set; + } + + /// + /// Gets or sets the modulus for the public key. + /// + [DataMember(EmitDefaultValue = false)] + public Byte[] Modulus + { + get; + set; + } + + public TaskAgentPublicKey Clone() + { + return new TaskAgentPublicKey(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentQueue.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentQueue.cs new file mode 100644 index 00000000000..73dee04ee07 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentQueue.cs @@ -0,0 +1,97 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// An agent queue. + /// + [DataContract] + public class TaskAgentQueue + { + public TaskAgentQueue() + { + } + + private TaskAgentQueue(TaskAgentQueue queueToBeCloned) + { + this.Id = queueToBeCloned.Id; + this.ProjectId = queueToBeCloned.ProjectId; + this.Name = queueToBeCloned.Name; +#pragma warning disable 0618 + this.GroupScopeId = queueToBeCloned.GroupScopeId; + this.Provisioned = queueToBeCloned.Provisioned; +#pragma warning restore 0618 + if (queueToBeCloned.Pool != null) + { + this.Pool = queueToBeCloned.Pool.Clone(); + } + } + + /// + /// ID of the queue + /// + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + set; + } + + /// + /// Project ID + /// + [DataMember(EmitDefaultValue = false)] + public Guid ProjectId + { + get; + set; + } + + /// + /// Name of the queue + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Pool reference for this queue + /// + [DataMember(EmitDefaultValue = false)] + public TaskAgentPoolReference Pool + { + get; + set; + } + + #region Obsolete Properties + + [EditorBrowsable(EditorBrowsableState.Never)] + [Obsolete("This property is no longer used and will be removed in a future version.", false)] + public Guid GroupScopeId + { + get; + set; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [Obsolete("This property is no longer used and will be removed in a future version.", false)] + public Boolean Provisioned + { + get; + set; + } + + #endregion + + public TaskAgentQueue Clone() + { + return new TaskAgentQueue(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentQueueActionFilter.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentQueueActionFilter.cs new file mode 100644 index 00000000000..69f83233656 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentQueueActionFilter.cs @@ -0,0 +1,22 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Filters queues based on whether the calling user has permission to use or manage the queue. + /// + [Flags] + [DataContract] + public enum TaskAgentQueueActionFilter + { + [EnumMember] + None = 0, + + [EnumMember] + Manage = 2, + + [EnumMember] + Use = 16, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentReference.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentReference.cs new file mode 100644 index 00000000000..12427442688 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentReference.cs @@ -0,0 +1,146 @@ +using GitHub.Services.WebApi; +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// A reference to an agent. 
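// Editor's note: illustrative sketch, not part of this patch. It shows one way a
// client could populate TaskAgentPublicKey from a locally generated RSA key pair,
// which matches the exponent/modulus layout expected by the constructor above.
using System.Security.Cryptography;
using GitHub.DistributedTask.WebApi;

static class PublicKeySample
{
    static TaskAgentPublicKey CreateFromNewRsaKey()
    {
        using (var rsa = RSA.Create(2048))
        {
            // Export only the public parameters; the private key stays local.
            RSAParameters publicParameters = rsa.ExportParameters(includePrivateParameters: false);
            return new TaskAgentPublicKey(publicParameters.Exponent, publicParameters.Modulus);
        }
    }
}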
+ /// + [DataContract] + public class TaskAgentReference : ICloneable + { + public TaskAgentReference() + { + } + + protected TaskAgentReference(TaskAgentReference referenceToBeCloned) + { + this.Id = referenceToBeCloned.Id; + this.Name = referenceToBeCloned.Name; + this.Version = referenceToBeCloned.Version; + this.Enabled = referenceToBeCloned.Enabled; + this.Status = referenceToBeCloned.Status; + this.OSDescription = referenceToBeCloned.OSDescription; + this.ProvisioningState = referenceToBeCloned.ProvisioningState; + this.AccessPoint = referenceToBeCloned.AccessPoint; + + if (referenceToBeCloned.m_links != null) + { + m_links = referenceToBeCloned.m_links.Clone(); + } + } + + /// + /// Identifier of the agent. + /// + [DataMember] + public Int32 Id + { + get; + set; + } + + /// + /// Name of the agent. + /// + [DataMember] + public String Name + { + get; + set; + } + + /// + /// Agent version. + /// + [DataMember] + public String Version + { + get; + set; + } + + /// + /// Agent OS. + /// + [DataMember(EmitDefaultValue = false)] + public String OSDescription + { + get; + set; + } + + /// + /// Whether or not this agent should run jobs. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean? Enabled + { + get; + set; + } + + /// + /// Whether or not the agent is online. + /// + [DataMember] + public TaskAgentStatus Status + { + get; + set; + } + + /// + /// Provisioning state of this agent. + /// + [DataMember] + public String ProvisioningState + { + get; + set; + } + + /// + /// This agent's access point. + /// + [DataMember(EmitDefaultValue = false)] + public String AccessPoint + { + get; + set; + } + + /// + /// Other details about the agent. + /// + public ReferenceLinks Links + { + get + { + if (m_links == null) + { + m_links = new ReferenceLinks(); + } + return m_links; + } + internal set + { + m_links = value; + } + } + + Object ICloneable.Clone() + { + return this.Clone(); + } + + public TaskAgentReference Clone() + { + return new TaskAgentReference(this); + } + + [DataMember(Name = "_links", EmitDefaultValue = false)] + private ReferenceLinks m_links; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentSession.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentSession.cs new file mode 100644 index 00000000000..51d95b67eac --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentSession.cs @@ -0,0 +1,121 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Represents a session for performing message exchanges from an agent. + /// + [DataContract] + public class TaskAgentSession + { + public TaskAgentSession() + { + } + + /// + /// Initializes a new TaskAgentSession instance with the specified owner name and agent. + /// + /// The name of the owner for this session. This should typically be the agent machine + /// The target agent for the session + public TaskAgentSession( + String ownerName, + TaskAgentReference agent) + { + this.Agent = agent; + this.OwnerName = ownerName; + } + + /// + /// Initializes a new TaskAgentSession isntance with the specified owner name, agent, and capabilities. + /// + /// The name of the owner for this session. 
This should typically be the agent machine + /// The target agent for the session + /// A collection of capabilities to publish on session creation + public TaskAgentSession( + String ownerName, + TaskAgentReference agent, + IDictionary systemCapabilities) + { + this.Agent = agent; + this.OwnerName = ownerName; + + foreach (var capability in systemCapabilities) + { + if (capability.Value != null) + { + this.SystemCapabilities.Add(capability.Key, capability.Value); + } + } + } + + /// + /// Gets the unique identifier for this session. + /// + [DataMember] + public Guid SessionId + { + get; + internal set; + } + + /// + /// Gets the key used to encrypt message traffic for this session. + /// + [DataMember(EmitDefaultValue = false)] + public TaskAgentSessionKey EncryptionKey + { + get; + internal set; + } + + /// + /// Gets or sets the owner name of this session. Generally this will be the machine of origination. + /// + [DataMember] + public String OwnerName + { + get; + set; + } + + /// + /// Gets or sets the agent which is the target of the session. + /// + [DataMember] + public TaskAgentReference Agent + { + get; + set; + } + + /// + /// Gets the collection of system capabilities used for this session. + /// + public IDictionary SystemCapabilities + { + get + { + if (m_systemCapabilities == null) + { + m_systemCapabilities = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_systemCapabilities; + } + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_systemCapabilities?.Count == 0) + { + m_systemCapabilities = null; + } + } + + + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "SystemCapabilities")] + private IDictionary m_systemCapabilities; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentSessionKey.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentSessionKey.cs new file mode 100644 index 00000000000..852af12e454 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentSessionKey.cs @@ -0,0 +1,34 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Represents a symmetric key used for message-level encryption for communication sent to an agent. + /// + [DataContract] + public sealed class TaskAgentSessionKey + { + /// + /// Gets or sets a value indicating whether or not the key value is encrypted. If this value is true, the + /// property should be decrypted using the RSA key exchanged with the server during + /// registration. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean Encrypted + { + get; + set; + } + + /// + /// Gets or sets the symmetric key value. 
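// Editor's note: illustrative sketch, not part of this patch. The contracts above
// only state that an encrypted body must be decrypted with the session's
// EncryptionKey together with the message IV; the AES/CBC mode and the base64
// encoding of Body assumed here are illustrative guesses, and a session key with
// Encrypted == true would first have to be unwrapped with the agent's RSA key.
using System;
using System.Security.Cryptography;
using System.Text;
using GitHub.DistributedTask.WebApi;

static class MessageDecryptionSample
{
    static string GetMessageBody(TaskAgentMessage message, byte[] sessionKey)
    {
        if (message.IV == null || message.IV.Length == 0)
        {
            return message.Body; // no IV means the body was sent in the clear
        }

        using (var aes = Aes.Create())
        {
            aes.Key = sessionKey;
            aes.IV = message.IV;

            using (ICryptoTransform decryptor = aes.CreateDecryptor())
            {
                byte[] cipherText = Convert.FromBase64String(message.Body);
                byte[] plainText = decryptor.TransformFinalBlock(cipherText, 0, cipherText.Length);
                return Encoding.UTF8.GetString(plainText);
            }
        }
    }
}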
+ /// + [DataMember(EmitDefaultValue = false)] + public Byte[] Value + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentStatus.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentStatus.cs new file mode 100644 index 00000000000..205ee387180 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentStatus.cs @@ -0,0 +1,14 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public enum TaskAgentStatus + { + [EnumMember] + Offline = 1, + + [EnumMember] + Online = 2, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentStatusFilter.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentStatusFilter.cs new file mode 100644 index 00000000000..ed531c13ef0 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentStatusFilter.cs @@ -0,0 +1,31 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// This is useful in getting a list of deployment targets, filtered by the deployment agent status. + /// + [Flags] + [DataContract] + public enum TaskAgentStatusFilter + { + /// + /// Only deployment targets that are offline. + /// + [EnumMember] + Offline = 1, + + /// + /// Only deployment targets that are online. + /// + [EnumMember] + Online = 2, + + /// + /// All deployment targets. + /// + [EnumMember] + All = Offline | Online + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentUpdate.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentUpdate.cs new file mode 100644 index 00000000000..cc9928a736b --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentUpdate.cs @@ -0,0 +1,115 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Details about an agent update. + /// + [DataContract] + public class TaskAgentUpdate + { + internal TaskAgentUpdate() + { + } + + private TaskAgentUpdate(TaskAgentUpdate agentUpdateToBeCloned) + { + this.CurrentState = agentUpdateToBeCloned.CurrentState; + if (agentUpdateToBeCloned.SourceVersion != null) + { + this.SourceVersion = agentUpdateToBeCloned.SourceVersion.Clone(); + } + if (agentUpdateToBeCloned.TargetVersion != null) + { + this.TargetVersion = agentUpdateToBeCloned.TargetVersion.Clone(); + } + if (agentUpdateToBeCloned.RequestTime != null) + { + this.RequestTime = agentUpdateToBeCloned.RequestTime; + } + if (agentUpdateToBeCloned.RequestedBy != null) + { + this.RequestedBy = agentUpdateToBeCloned.RequestedBy.Clone(); + } + if (agentUpdateToBeCloned.Reason != null) + { + switch (agentUpdateToBeCloned.Reason.Code) + { + case TaskAgentUpdateReasonType.Manual: + this.Reason = (agentUpdateToBeCloned.Reason as TaskAgentManualUpdate).Clone(); + break; + case TaskAgentUpdateReasonType.MinAgentVersionRequired: + this.Reason = (agentUpdateToBeCloned.Reason as TaskAgentMinAgentVersionRequiredUpdate).Clone(); + break; + } + } + } + + /// + /// Source agent version of the update. + /// + [DataMember] + public PackageVersion SourceVersion + { + get; + internal set; + } + + /// + /// Target agent version of the update. + /// + [DataMember] + public PackageVersion TargetVersion + { + get; + internal set; + } + + /// + /// Date on which this update was requested. + /// + [DataMember] + public DateTime? RequestTime + { + get; + internal set; + } + + /// + /// Identity which requested this update. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef RequestedBy + { + get; + internal set; + } + + /// + /// Current state of this agent update. 
+ /// + [DataMember(EmitDefaultValue = false)] + public String CurrentState + { + get; + set; + } + + /// + /// Reason for this update. + /// + [DataMember] + public TaskAgentUpdateReason Reason + { + get; + set; + } + + public TaskAgentUpdate Clone() + { + return new TaskAgentUpdate(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAgentUpdateReason.cs b/src/Sdk/DTWebApi/WebApi/TaskAgentUpdateReason.cs new file mode 100644 index 00000000000..099c61a2f36 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAgentUpdateReason.cs @@ -0,0 +1,178 @@ +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public enum TaskAgentUpdateReasonType + { + [EnumMember] + Manual = 1, + + [EnumMember] + MinAgentVersionRequired = 2, + } + + internal sealed class TaskAgentUpdateReasonJsonConverter : VssSecureJsonConverter + { + public override Boolean CanConvert(Type objectType) + { + return typeof(TaskAgentUpdateReason).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + Object newValue = null; + JToken propertyValue; + JObject value = JObject.Load(reader); + + if (value.TryGetValue("Code", StringComparison.OrdinalIgnoreCase, out propertyValue)) + { + if (propertyValue.Type == JTokenType.String) + { + TaskAgentUpdateReasonType code; + if (Enum.TryParse((String)propertyValue, out code)) + { + switch (code) + { + case TaskAgentUpdateReasonType.Manual: + newValue = new TaskAgentManualUpdate(); + break; + + case TaskAgentUpdateReasonType.MinAgentVersionRequired: + newValue = new TaskAgentMinAgentVersionRequiredUpdate(); + break; + } + + } + } + } + + if (newValue == null) + { + return existingValue; + } + + using (JsonReader objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, newValue); + } + + return newValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + // The virtual method returns false for CanWrite so this should never be invoked + throw new NotSupportedException(); + } + } + + [DataContract] + [ServiceEventObjectAttribute] + [JsonConverter(typeof(TaskAgentUpdateReasonJsonConverter))] + public abstract class TaskAgentUpdateReason + { + protected TaskAgentUpdateReason(TaskAgentUpdateReasonType code) + { + this.Code = code; + } + + [DataMember] + public TaskAgentUpdateReasonType Code { get; private set; } + } + + [DataContract] + public class TaskAgentManualUpdate : TaskAgentUpdateReason + { + [JsonConstructor] + internal TaskAgentManualUpdate() : + base(TaskAgentUpdateReasonType.Manual) + { + } + + public TaskAgentManualUpdate Clone() + { + return new TaskAgentManualUpdate(); + } + } + + [DataContract] + public class TaskAgentMinAgentVersionRequiredUpdate : TaskAgentUpdateReason + { + [JsonConstructor] + internal TaskAgentMinAgentVersionRequiredUpdate() : + base(TaskAgentUpdateReasonType.MinAgentVersionRequired) + { + } + + private TaskAgentMinAgentVersionRequiredUpdate(TaskAgentMinAgentVersionRequiredUpdate updateToBeCloned) : + base(TaskAgentUpdateReasonType.MinAgentVersionRequired) + { + if 
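// Editor's note: illustrative sketch, not part of this patch. Because the abstract
// TaskAgentUpdateReason base class above carries the [JsonConverter] attribute, a
// plain Json.NET deserialization call is enough to get the concrete reason type.
using System;
using Newtonsoft.Json;
using GitHub.DistributedTask.WebApi;

static class UpdateReasonSample
{
    static void Demo()
    {
        const string json = "{\"Code\":\"MinAgentVersionRequired\"}";

        // The converter inspects the "Code" property and instantiates the matching
        // concrete type, or keeps the existing value (null here) when the code is
        // unrecognized.
        TaskAgentUpdateReason reason = JsonConvert.DeserializeObject<TaskAgentUpdateReason>(json);

        Console.WriteLine(reason is TaskAgentMinAgentVersionRequiredUpdate
            ? "agent must update to satisfy a minimum version demand"
            : "manual or unknown update reason");
    }
}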
(updateToBeCloned.MinAgentVersion != null) + { + this.MinAgentVersion = updateToBeCloned.MinAgentVersion.Clone(); + } + if (updateToBeCloned.JobDefinition != null) + { + this.JobDefinition = updateToBeCloned.JobDefinition.Clone(); + } + if (updateToBeCloned.JobOwner != null) + { + this.JobOwner = updateToBeCloned.JobOwner.Clone(); + } + } + + [DataMember] + public Demand MinAgentVersion + { + get; + set; + } + + [DataMember] + public TaskOrchestrationOwner JobDefinition + { + get; + set; + } + + [DataMember] + public TaskOrchestrationOwner JobOwner + { + get; + set; + } + + public TaskAgentMinAgentVersionRequiredUpdate Clone() + { + return new TaskAgentMinAgentVersionRequiredUpdate(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskAttachment.cs b/src/Sdk/DTWebApi/WebApi/TaskAttachment.cs new file mode 100644 index 00000000000..0b55a06d296 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskAttachment.cs @@ -0,0 +1,105 @@ +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskAttachment + { + internal TaskAttachment() + { } + + internal TaskAttachment(String type, String name, ReferenceLinks links) + { + ArgumentUtility.CheckStringForNullOrEmpty(type, "type"); + ArgumentUtility.CheckStringForNullOrEmpty(name, "name"); + this.Type = type; + this.Name = name; + this.m_links = links; + } + + public TaskAttachment(String type, String name) + { + ArgumentUtility.CheckStringForNullOrEmpty(type, "type"); + ArgumentUtility.CheckStringForNullOrEmpty(name, "name"); + this.Type = type; + this.Name = name; + } + + + [DataMember] + public String Type + { + get; + internal set; + } + + [DataMember] + public String Name + { + get; + internal set; + } + + public ReferenceLinks Links + { + get + { + if (m_links == null) + { + m_links = new ReferenceLinks(); + } + return m_links; + } + } + + [DataMember] + public DateTime CreatedOn + { + get; + internal set; + } + + [DataMember] + public DateTime LastChangedOn + { + get; + internal set; + } + + [DataMember] + public Guid LastChangedBy + { + get; + internal set; + } + + [DataMember] + public Guid TimelineId + { + get; + set; + } + + [DataMember] + public Guid RecordId + { + get; + set; + } + + [DataMember(Name = "_links", EmitDefaultValue = false)] + private ReferenceLinks m_links; + } + + [GenerateAllConstants] + public class CoreAttachmentType + { + public static readonly String Log = "DistributedTask.Core.Log"; + public static readonly String Summary = "DistributedTask.Core.Summary"; + public static readonly String FileAttachment = "DistributedTask.Core.FileAttachment"; + public static readonly String DiagnosticLog = "DistributedTask.Core.DiagnosticLog"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskDefinition.cs b/src/Sdk/DTWebApi/WebApi/TaskDefinition.cs new file mode 100644 index 00000000000..65c5a828847 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskDefinition.cs @@ -0,0 +1,552 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Runtime.Serialization; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + [DebuggerDisplay("Id: {Id}, Name: {Name}, Version: {Version}")] + public class TaskDefinition + { + public TaskDefinition() + { + this.DefinitionType = TaskDefinitionType.Task; + } + + protected TaskDefinition(TaskDefinition taskDefinitionToClone) + { + if (taskDefinitionToClone.AgentExecution != null) + { 
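// Editor's note: illustrative sketch, not part of this patch. It uses only the
// public constructor and setters of TaskAttachment declared above together with
// the well-known type names from CoreAttachmentType; the attachment name is an
// invented example.
using System;
using GitHub.DistributedTask.WebApi;

static class AttachmentSample
{
    static TaskAttachment CreateSummaryAttachment(Guid timelineId, Guid recordId)
    {
        return new TaskAttachment(CoreAttachmentType.Summary, "build-summary.md")
        {
            TimelineId = timelineId,
            RecordId = recordId,
        };
    }
}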
+ this.AgentExecution = taskDefinitionToClone.AgentExecution.Clone(); + } + + if (taskDefinitionToClone.PreJobExecution != null) + { + this.m_preJobExecution = new Dictionary(taskDefinitionToClone.m_preJobExecution); + } + + if (taskDefinitionToClone.Execution != null) + { + this.m_execution = new Dictionary(taskDefinitionToClone.m_execution); + } + + if (taskDefinitionToClone.PostJobExecution != null) + { + this.m_postJobExecution = new Dictionary(taskDefinitionToClone.m_postJobExecution); + } + + this.Author = taskDefinitionToClone.Author; + this.Category = taskDefinitionToClone.Category; + this.HelpMarkDown = taskDefinitionToClone.HelpMarkDown; + this.HelpUrl = taskDefinitionToClone.HelpUrl; + this.ContentsUploaded = taskDefinitionToClone.ContentsUploaded; + + if (taskDefinitionToClone.m_visibilities != null) + { + this.m_visibilities = new List(taskDefinitionToClone.m_visibilities); + } + + if (taskDefinitionToClone.m_runsOn != null) + { + this.m_runsOn = new List(taskDefinitionToClone.m_runsOn); + } + + if (this.m_runsOn == null) + { + this.m_runsOn = new List(TaskRunsOnConstants.DefaultValue); + } + + if (taskDefinitionToClone.m_demands != null) + { + this.m_demands = new List(taskDefinitionToClone.m_demands.Select(x => x.Clone())); + } + + this.Description = taskDefinitionToClone.Description; + this.FriendlyName = taskDefinitionToClone.FriendlyName; + this.HostType = taskDefinitionToClone.HostType; + this.IconUrl = taskDefinitionToClone.IconUrl; + this.Id = taskDefinitionToClone.Id; + + if (taskDefinitionToClone.m_inputs != null) + { + this.m_inputs = new List(taskDefinitionToClone.m_inputs.Select(x => x.Clone())); + } + + if (taskDefinitionToClone.m_satisfies != null) + { + this.m_satisfies = new List(taskDefinitionToClone.m_satisfies); + } + + if (taskDefinitionToClone.m_sourceDefinitions != null) + { + this.m_sourceDefinitions = new List(taskDefinitionToClone.m_sourceDefinitions.Select(x => x.Clone())); + } + + if (taskDefinitionToClone.m_dataSourceBindings != null) + { + this.m_dataSourceBindings = new List(taskDefinitionToClone.m_dataSourceBindings.Select(x => x.Clone())); + } + + if (taskDefinitionToClone.m_groups != null) + { + this.m_groups = new List(taskDefinitionToClone.m_groups.Select(x => x.Clone())); + } + + if (taskDefinitionToClone.m_outputVariables != null) + { + this.m_outputVariables = new List(taskDefinitionToClone.m_outputVariables.Select(x => x.Clone())); + } + + this.InstanceNameFormat = taskDefinitionToClone.InstanceNameFormat; + this.MinimumAgentVersion = taskDefinitionToClone.MinimumAgentVersion; + this.Name = taskDefinitionToClone.Name; + this.PackageLocation = taskDefinitionToClone.PackageLocation; + this.PackageType = taskDefinitionToClone.PackageType; + this.ServerOwned = taskDefinitionToClone.ServerOwned; + this.SourceLocation = taskDefinitionToClone.SourceLocation; + this.Version = taskDefinitionToClone.Version.Clone(); + this.ContributionIdentifier = taskDefinitionToClone.ContributionIdentifier; + this.ContributionVersion = taskDefinitionToClone.ContributionVersion; + this.Deprecated = taskDefinitionToClone.Deprecated; + this.Disabled = taskDefinitionToClone.Disabled; + this.DefinitionType = taskDefinitionToClone.DefinitionType; + this.ShowEnvironmentVariables = taskDefinitionToClone.ShowEnvironmentVariables; + this.Preview = taskDefinitionToClone.Preview; + this.ReleaseNotes = taskDefinitionToClone.ReleaseNotes; + + if (this.DefinitionType == null) + { + this.DefinitionType = TaskDefinitionType.Task; + } + } + + // + // Members to identify this task 
+ // + [DataMember(EmitDefaultValue = false)] + public Guid Id + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TaskVersion Version + { + get; + set; + } + + [Obsolete("Ecosystem property is not currently supported.")] + [DataMember(EmitDefaultValue = false)] + public String Ecosystem + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Boolean ServerOwned + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Boolean ContentsUploaded + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String IconUrl + { + get; + set; + } + + // + // Location Information for acquisition + // + [DataMember(EmitDefaultValue = false)] + public String HostType + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String PackageType + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String PackageLocation + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String SourceLocation + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String MinimumAgentVersion + { + get; + set; + } + + // + // Helpful Metadata for discovery and designer + // + [DataMember(EmitDefaultValue = false)] + public String FriendlyName + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Category + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String HelpMarkDown + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String HelpUrl + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String ReleaseNotes + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Boolean Preview + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Boolean Deprecated + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String ContributionIdentifier + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String ContributionVersion + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Boolean Disabled + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String DefinitionType + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Boolean ShowEnvironmentVariables + { + get; + set; + } + + public IList Visibility + { + get + { + if (m_visibilities == null) + { + m_visibilities = new List(); + } + return m_visibilities; + } + } + + public IList RunsOn + { + get + { + if (m_runsOn == null) + { + m_runsOn = new List(TaskRunsOnConstants.DefaultValue); + } + + return m_runsOn; + } + } + + [DataMember(EmitDefaultValue = false)] + public String Author { get; set; } + + [DataMember(EmitDefaultValue = false)] + public IList Demands + { + get + { + if (m_demands == null) + { + m_demands = new List(); + } + return m_demands; + } + } + + [DataMember(EmitDefaultValue = false)] + public IList Groups + { + get + { + if (m_groups == null) + { + m_groups = new List(); + } + return m_groups; + } + } + + [DataMember(EmitDefaultValue = false)] + public IList Inputs + { + get + { + if (m_inputs == null) + { + m_inputs = new List(); + } + return m_inputs; + } + } + + [DataMember(EmitDefaultValue = false)] + public IList Satisfies + { + get + { + if (m_satisfies == null) + { + m_satisfies = new List(); + } + return 
m_satisfies; + } + } + + [DataMember(EmitDefaultValue = false)] + public IList SourceDefinitions + { + get + { + if (m_sourceDefinitions == null) + { + m_sourceDefinitions = new List(); + } + return m_sourceDefinitions; + } + } + + [DataMember(EmitDefaultValue = false)] + public IList DataSourceBindings + { + get + { + if (m_dataSourceBindings == null) + { + m_dataSourceBindings = new List(); + } + return m_dataSourceBindings; + } + } + + [DataMember(EmitDefaultValue = false)] + public String InstanceNameFormat + { + get; + set; + } + + // + // Execution members + // + [DataMember(EmitDefaultValue = false)] + public IDictionary PreJobExecution + { + get + { + if (m_preJobExecution == null) + { + m_preJobExecution = new Dictionary(); + } + return m_preJobExecution; + } + } + + [DataMember(EmitDefaultValue = false)] + public IDictionary Execution + { + get + { + if (m_execution == null) + { + m_execution = new Dictionary(); + } + return m_execution; + } + } + + [DataMember(EmitDefaultValue = false)] + public IDictionary PostJobExecution + { + get + { + if (m_postJobExecution == null) + { + m_postJobExecution = new Dictionary(); + } + return m_postJobExecution; + } + } + + [DataMember(EmitDefaultValue = false)] + public TaskExecution AgentExecution + { + get; + set; + } + + public IList OutputVariables + { + get + { + if (m_outputVariables == null) + { + m_outputVariables = new List(); + } + return m_outputVariables; + } + } + + internal TaskDefinition Clone() + { + return new TaskDefinition(this); + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedVisibilities, ref m_visibilities, true); + SerializationHelper.Copy(ref m_serializedRunsOn, ref m_runsOn, true); + RenameLegacyRunsOnValues(m_runsOn); + SerializationHelper.Copy(ref m_serializedOutputVariables, ref m_outputVariables, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_visibilities, ref m_serializedVisibilities); + RenameLegacyRunsOnValues(m_runsOn); + SerializationHelper.Copy(ref m_runsOn, ref m_serializedRunsOn); + SerializationHelper.Copy(ref m_outputVariables, ref m_serializedOutputVariables); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedVisibilities = null; + m_serializedRunsOn = null; + m_serializedOutputVariables = null; + } + + private static void RenameLegacyRunsOnValues(IList runsOn) + { + for (int i = 0; i < runsOn?.Count(); i++) + { + if (runsOn[i].Equals(TaskRunsOnConstants.RunsOnMachineGroup, StringComparison.OrdinalIgnoreCase)) + { + runsOn[i] = TaskRunsOnConstants.RunsOnDeploymentGroup; + } + } + } + + // + // Private + // + [DataMember(Name = "Visibility", EmitDefaultValue = false)] + private List m_serializedVisibilities; + + [DataMember(Name = "RunsOn", EmitDefaultValue = false)] + private List m_serializedRunsOn; + + [DataMember(Name = "OutputVariables", EmitDefaultValue = false)] + private List m_serializedOutputVariables; + + private Dictionary m_preJobExecution; + private Dictionary m_execution; + private Dictionary m_postJobExecution; + private List m_demands; + private List m_inputs; + private List m_satisfies; + private List m_sourceDefinitions; + private List m_dataSourceBindings; + private List m_groups; + private List m_outputVariables; + private List m_visibilities; + private List m_runsOn; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskDefinitionEndpoint.cs 
b/src/Sdk/DTWebApi/WebApi/TaskDefinitionEndpoint.cs new file mode 100644 index 00000000000..7480da3110b --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskDefinitionEndpoint.cs @@ -0,0 +1,87 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskDefinitionEndpoint + { + /// + /// The scope as understood by Connected Services. + /// Essentialy, a project-id for now. + /// + [DataMember] + public String Scope + { + get; + set; + } + + /// + /// URL to GET. + /// + [DataMember] + public String Url + { + get; + set; + } + + /// + /// An XPath/Json based selector to filter response returned by fetching + /// the endpoint Url. An XPath based selector must be prefixed with + /// the string "xpath:". A Json based selector must be prefixed with "jsonpath:". + /// + /// The following selector defines an XPath for extracting nodes named 'ServiceName'. + /// + /// endpoint.Selector = "xpath://ServiceName"; + /// + /// + /// + [DataMember] + public String Selector + { + get; + set; + } + + /// + /// An Json based keyselector to filter response returned by fetching + /// the endpoint Url.A Json based keyselector must be prefixed with "jsonpath:". + /// KeySelector can be used to specify the filter to get the keys for the values specified with Selector. + /// + /// The following keyselector defines an Json for extracting nodes named 'ServiceName'. + /// + /// endpoint.KeySelector = "jsonpath://ServiceName"; + /// + /// + /// + [DataMember] + public String KeySelector + { + get; + set; + } + + /// + /// An ID that identifies a service connection to be used for authenticating + /// endpoint requests. + /// + [DataMember] + public String ConnectionId + { + get; + set; + } + + /// + /// TaskId that this endpoint belongs to. + /// + [DataMember] + public String TaskId + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskDefinitionReference.cs b/src/Sdk/DTWebApi/WebApi/TaskDefinitionReference.cs new file mode 100644 index 00000000000..28acbcba0f1 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskDefinitionReference.cs @@ -0,0 +1,78 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskDefinitionReference + { + public TaskDefinitionReference() + { + // Default is Task + this.DefinitionType = TaskDefinitionType.Task; + } + + private TaskDefinitionReference(TaskDefinitionReference definitionReference) + { + this.Id = definitionReference.Id; + this.VersionSpec = definitionReference.VersionSpec; + + // If it is null, we set it to task + this.DefinitionType = definitionReference.DefinitionType ?? TaskDefinitionType.Task; + } + + /// + /// Gets or sets the unique identifier of task. + /// + [DataMember(IsRequired = true)] + public Guid Id { get; set; } + + /// + /// Gets or sets the version specification of task. + /// + [DataMember(IsRequired = true)] + public String VersionSpec { get; set; } + + /// + /// Gets or sets the definition type. Values can be 'task' or 'metaTask'. + /// + [DataMember(IsRequired = true)] + public String DefinitionType + { + get + { + return this.m_definitionType ?? (this.m_definitionType = TaskDefinitionType.Task); + } + + set + { + this.m_definitionType = value; + } + } + + public override bool Equals(object obj) + { + var toEqual = (TaskDefinitionReference)obj; + if (toEqual == null) + { + return false; + } + + return this.Id.Equals(toEqual.Id) && + (this.VersionSpec?.Equals(toEqual.VersionSpec) ?? 
this.VersionSpec == toEqual.VersionSpec) && + (this.DefinitionType?.Equals(toEqual.DefinitionType) ?? this.DefinitionType == toEqual.DefinitionType); + } + + public override int GetHashCode() + { + return this.ToString().GetHashCode(); + } + + internal TaskDefinitionReference Clone() + { + return new TaskDefinitionReference(this); + } + + private String m_definitionType; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskDefinitionType.cs b/src/Sdk/DTWebApi/WebApi/TaskDefinitionType.cs new file mode 100644 index 00000000000..83bf7eaeef1 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskDefinitionType.cs @@ -0,0 +1,12 @@ + +using System; + +namespace GitHub.DistributedTask.WebApi +{ + public class TaskDefinitionType + { + public const String Task = "task"; + + public const String MetaTask = "metaTask"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskExecution.cs b/src/Sdk/DTWebApi/WebApi/TaskExecution.cs new file mode 100644 index 00000000000..32b02264cbb --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskExecution.cs @@ -0,0 +1,68 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskExecution + { + public TaskExecution() + { + } + + private TaskExecution(TaskExecution taskExecutionToBeCloned) + { + if (taskExecutionToBeCloned.ExecTask != null) + { + this.ExecTask = taskExecutionToBeCloned.ExecTask.Clone(); + } + + if (taskExecutionToBeCloned.PlatformInstructions != null) + { + this.PlatformInstructions = new Dictionary>(taskExecutionToBeCloned.PlatformInstructions, StringComparer.OrdinalIgnoreCase); + } + } + + /// + /// The utility task to run. Specifying this means that this task definition is simply a meta task to call another task. + /// This is useful for tasks that call utility tasks like powershell and commandline + /// + [DataMember(Order = 10, EmitDefaultValue = false)] + public TaskReference ExecTask + { + get; + set; + } + + /// + /// If a task is going to run code, then this provides the type/script etc... information by platform. + /// For example, it might look like. + /// net45: { + /// typeName: "GitHub.Automation.Tasks.PowerShellTask", + /// assemblyName: "GitHub.Automation.Tasks.PowerShell.dll" + /// } + /// net20: { + /// typeName: "GitHub.Automation.Tasks.PowerShellTask", + /// assemblyName: "GitHub.Automation.Tasks.PowerShell.dll" + /// } + /// java: { + /// jar: "powershelltask.tasks.automation.teamfoundation.microsoft.com", + /// } + /// node: { + /// script: "powershellhost.js", + /// } + /// + [DataMember(Order = 20, EmitDefaultValue = false)] + public Dictionary> PlatformInstructions + { + get; + set; + } + + internal TaskExecution Clone() + { + return new TaskExecution(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskGroup.cs b/src/Sdk/DTWebApi/WebApi/TaskGroup.cs new file mode 100644 index 00000000000..1d57777ea6e --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskGroup.cs @@ -0,0 +1,173 @@ +using System; +using System.Runtime.Serialization; +using System.Collections.Generic; +using System.Linq; + +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskGroup: TaskDefinition + { + /// + /// A task group lets you to encapsulate a sequence of tasks already defined in a build definition, a release definition or a task group into a single reusable task. 
+ /// + public TaskGroup() + { + this.DefinitionType = TaskDefinitionType.MetaTask; + } + + private TaskGroup(TaskGroup definition) : base(definition) + { + this.DefinitionType = TaskDefinitionType.MetaTask; + + this.Owner = definition.Owner; + this.Revision = definition.Revision; + this.CreatedOn = definition.CreatedOn; + this.ModifiedOn = definition.ModifiedOn; + this.Comment = definition.Comment; + this.ParentDefinitionId = definition.ParentDefinitionId; + + if (definition.Tasks != null) + { + this.Tasks = new List(definition.Tasks.Select(x => x.Clone())); + } + + if (definition.CreatedBy != null) + { + this.CreatedBy = definition.CreatedBy.Clone(); + } + + if (definition.ModifiedBy != null) + { + this.ModifiedBy = definition.ModifiedBy.Clone(); + } + } + + public IList Tasks + { + get + { + if (m_tasks == null) + { + m_tasks = new List(); + } + + return m_tasks; + } + set + { + if (value == null) + { + m_tasks = new List(); + } + else + { + this.m_tasks = value; + } + } + } + + /// + /// Gets or sets the owner. + /// + [DataMember(EmitDefaultValue = false)] + public String Owner + { + get; + set; + } + + /// + /// Gets or sets revision. + /// + [DataMember] + public Int32 Revision + { + get; + set; + } + + /// + /// Gets or sets the identity who created. + /// + [DataMember] + public IdentityRef CreatedBy + { + get; + set; + } + + /// + /// Gets or sets date on which it got created. + /// + [DataMember] + public DateTime CreatedOn + { + get; + set; + } + + /// + /// Gets or sets the identity who modified. + /// + [DataMember] + public IdentityRef ModifiedBy + { + get; + set; + } + + /// + /// Gets or sets date on which it got modified. + /// + [DataMember] + public DateTime ModifiedOn + { + get; + set; + } + + /// + /// Gets or sets comment. + /// + [DataMember(EmitDefaultValue = false)] + public String Comment + { + get; + set; + } + + /// + /// Gets or sets parent task group Id. This is used while creating a draft task group. + /// + [DataMember(EmitDefaultValue = false)] + public Guid? ParentDefinitionId + { + get; + set; + } + + /// + /// Gets or sets as 'true' to indicate as deleted, 'false' otherwise. + /// + [DataMember(EmitDefaultValue = false)] + public bool Deleted + { + get; + set; + } + + internal new TaskGroup Clone() + { + return new TaskGroup(this); + } + + /// + /// Gets or sets the tasks. + /// + [DataMember(Name = "Tasks")] + private IList m_tasks; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskGroupCreateParameter.cs b/src/Sdk/DTWebApi/WebApi/TaskGroupCreateParameter.cs new file mode 100644 index 00000000000..0ac49e6c80a --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskGroupCreateParameter.cs @@ -0,0 +1,140 @@ +using System; +using System.Runtime.Serialization; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskGroupCreateParameter + { + /// + /// Sets name of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String Name { get; set; } + + /// + /// Sets friendly name of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String FriendlyName { get; set; } + + /// + /// Sets author name of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String Author { get; set; } + + /// + /// Sets description of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String Description { get; set; } + + /// + /// Sets parent task group Id. This is used while creating a draft task group. 
+ /// + [DataMember(EmitDefaultValue = false)] + public Guid? ParentDefinitionId { get; set; } + + /// + /// Sets url icon of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String IconUrl { get; set; } + + /// + /// Sets display name of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String InstanceNameFormat { get; set; } + + /// + /// Sets category of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String Category { get; set; } + + /// + /// Sets version of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public TaskVersion Version { get; set; } + + public IList RunsOn + { + get + { + if (m_runsOn == null) + { + m_runsOn = new List(TaskRunsOnConstants.DefaultValue); + } + + return m_runsOn; + } + } + + public IList Tasks + { + get + { + if (m_tasks == null) + { + m_tasks = new List(); + } + + return m_tasks; + } + } + + public IList Inputs + { + get + { + if (m_inputs == null) + { + m_inputs = new List(); + } + return m_inputs; + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedRunsOn, ref m_runsOn, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_runsOn, ref m_serializedRunsOn); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedRunsOn = null; + } + + /// + /// Sets RunsOn of the task group. Value can be 'Agent', 'Server' or 'DeploymentGroup'. + /// + [DataMember(Name = "RunsOn", EmitDefaultValue = false)] + private List m_serializedRunsOn; + + /// + /// Sets tasks for the task group. + /// + [DataMember(Name = "Tasks", EmitDefaultValue = false)] + private IList m_tasks; + + /// + /// Sets input for the task group. 
+ /// + [DataMember(Name = "Inputs", EmitDefaultValue = false)] + private List m_inputs; + + private List m_runsOn; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskGroupDefinition.cs b/src/Sdk/DTWebApi/WebApi/TaskGroupDefinition.cs new file mode 100644 index 00000000000..191ed3e0df0 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskGroupDefinition.cs @@ -0,0 +1,96 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskGroupDefinition + { + public TaskGroupDefinition() + { + IsExpanded = false; + } + + private TaskGroupDefinition(TaskGroupDefinition inputDefinitionToClone) + { + this.IsExpanded = inputDefinitionToClone.IsExpanded; + this.Name = inputDefinitionToClone.Name; + this.DisplayName = inputDefinitionToClone.DisplayName; + this.VisibleRule = inputDefinitionToClone.VisibleRule; + + if (inputDefinitionToClone.m_tags != null) + { + this.m_tags = new List(inputDefinitionToClone.m_tags); + } + } + + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String DisplayName + { + get; + set; + } + + [DataMember(EmitDefaultValue = true)] + public Boolean IsExpanded + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String VisibleRule + { + get; + set; + } + + public IList Tags + { + get + { + if (m_tags == null) + { + m_tags = new List(); + } + return m_tags; + } + } + + public TaskGroupDefinition Clone() + { + return new TaskGroupDefinition(this); + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedTags, ref m_tags, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_tags, ref m_serializedTags); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedTags = null; + } + + [DataMember(Name = "Tags", EmitDefaultValue = false)] + private List m_serializedTags; + + private List m_tags; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskGroupExpands.cs b/src/Sdk/DTWebApi/WebApi/TaskGroupExpands.cs new file mode 100644 index 00000000000..b9868d577e9 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskGroupExpands.cs @@ -0,0 +1,11 @@ +namespace GitHub.DistributedTask.WebApi +{ + using System; + + [Flags] + public enum TaskGroupExpands + { + None = 0, + Tasks = 2, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskGroupQueryOrder.cs b/src/Sdk/DTWebApi/WebApi/TaskGroupQueryOrder.cs new file mode 100644 index 00000000000..c2184056945 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskGroupQueryOrder.cs @@ -0,0 +1,30 @@ +// -------------------------------------------------------------------------------------------------------------------- +// +// 2012-2023, All rights reserved. +// +// -------------------------------------------------------------------------------------------------------------------- + +namespace GitHub.DistributedTask.WebApi +{ + using System.Runtime.Serialization; + + /// + /// Specifies the desired ordering of taskGroups. + /// + [DataContract] + public enum TaskGroupQueryOrder + { + /// + /// Order by createdon ascending. + /// + [EnumMember] + CreatedOnAscending = 0, + + /// + /// Order by createdon descending. 
+ /// + [EnumMember] + CreatedOnDescending = 1, + + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskGroupRevision.cs b/src/Sdk/DTWebApi/WebApi/TaskGroupRevision.cs new file mode 100644 index 00000000000..7f13ec8c273 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskGroupRevision.cs @@ -0,0 +1,34 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskGroupRevision + { + [DataMember] + public Guid TaskGroupId { get; set; } + + [DataMember] + public Int32 Revision { get; set; } + + [DataMember] + public Int32 MajorVersion { get; set; } + + [DataMember] + public IdentityRef ChangedBy { get; set; } + + [DataMember] + public DateTime ChangedDate { get; set; } + + [DataMember] + public AuditAction ChangeType { get; set; } + + [DataMember] + public Int32 FileId { get; set; } + + [DataMember] + public String Comment { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskGroupStep.cs b/src/Sdk/DTWebApi/WebApi/TaskGroupStep.cs new file mode 100644 index 00000000000..e20fd2775f8 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskGroupStep.cs @@ -0,0 +1,156 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Represents tasks in the task group. + /// + [DataContract] + public class TaskGroupStep + { + public TaskGroupStep() + { + } + + private TaskGroupStep(TaskGroupStep taskGroupStep) + { + this.DisplayName = taskGroupStep.DisplayName; + this.AlwaysRun = taskGroupStep.AlwaysRun; + this.ContinueOnError = taskGroupStep.ContinueOnError; + this.Enabled = taskGroupStep.Enabled; + this.TimeoutInMinutes = taskGroupStep.TimeoutInMinutes; + this.Inputs = new Dictionary(taskGroupStep.Inputs); + + if (taskGroupStep.m_environment != null) + { + foreach (var property in taskGroupStep.m_environment) + { + this.Environment[property.Key] = property.Value; + } + } + + this.Task = taskGroupStep.Task.Clone(); + } + + /// + /// Gets or sets the display name. + /// + [DataMember] + public String DisplayName + { + get + { + if (this.m_displayName == null) + { + this.m_displayName = String.Empty; + } + + return this.m_displayName; + } + set + { + this.m_displayName = value; + } + } + + /// + /// Gets or sets as 'true' to run the task always, 'false' otherwise. + /// + [DataMember] + public bool AlwaysRun { get; set; } + + /// + /// Gets or sets as 'true' to continue on error, 'false' otherwise. + /// + [DataMember] + public bool ContinueOnError { get; set; } + + /// + /// Gets or sets condition for the task. + /// + [DataMember(EmitDefaultValue = false)] + public String Condition { get; set; } + + /// + /// Gets or sets as task is enabled or not. + /// + [DataMember] + public bool Enabled { get; set; } + + /// + /// Gets or sets the maximum time, in minutes, that a task is allowed to execute on agent before being cancelled by server. A zero value indicates an infinite timeout. + /// + [DataMember] + public int TimeoutInMinutes { get; set; } + + /// + /// Gets or sets dictionary of inputs. + /// + [DataMember] + public IDictionary Inputs { get; set; } + + public IDictionary Environment + { + get + { + if (m_environment == null) + { + m_environment = new Dictionary(StringComparer.Ordinal); + } + return m_environment; + } + } + + /// + /// Gets dictionary of environment variables. 
+ /// + [DataMember(Name = "Environment", EmitDefaultValue = false)] + private Dictionary m_environment; + + /// + /// Gets or sets the reference of the task. + /// + [DataMember] + public TaskDefinitionReference Task { get; set; } + + public static bool EqualsAndOldTaskInputsAreSubsetOfNewTaskInputs( + TaskGroupStep oldTaskGroupStep, + TaskGroupStep newTaskGroupStep) + { + if (!oldTaskGroupStep.DisplayName.Equals(newTaskGroupStep.DisplayName) + || oldTaskGroupStep.AlwaysRun != newTaskGroupStep.AlwaysRun + || oldTaskGroupStep.Enabled != newTaskGroupStep.Enabled + || oldTaskGroupStep.ContinueOnError != newTaskGroupStep.ContinueOnError + || !oldTaskGroupStep.Task.Equals(newTaskGroupStep.Task)) + { + return false; + } + + if (!(oldTaskGroupStep.Inputs != null && newTaskGroupStep.Inputs != null + && oldTaskGroupStep.Inputs.Keys.All(key => newTaskGroupStep.Inputs.ContainsKey(key) + && newTaskGroupStep.Inputs[key].Equals(oldTaskGroupStep.Inputs[key])))) + { + return false; + } + + if (!(oldTaskGroupStep.Environment != null + && oldTaskGroupStep.Environment.Keys.All(key => newTaskGroupStep.Environment.ContainsKey(key) + && newTaskGroupStep.Environment[key].Equals(oldTaskGroupStep.Environment[key])))) + { + return false; + } + + return true; + } + + internal TaskGroupStep Clone() + { + return new TaskGroupStep(this); + } + + private String m_displayName; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskGroupUpdateParameter.cs b/src/Sdk/DTWebApi/WebApi/TaskGroupUpdateParameter.cs new file mode 100644 index 00000000000..0c9d7ea8a08 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskGroupUpdateParameter.cs @@ -0,0 +1,158 @@ +using System; +using System.Runtime.Serialization; +using System.Collections.Generic; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskGroupUpdateParameter + { + /// + /// Sets the unique identifier of this field. + /// + [DataMember(EmitDefaultValue = false)] + public Guid Id { get; set; } + + /// + /// Sets name of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String Name { get; set; } + + /// + /// Sets friendly name of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String FriendlyName { get; set; } + + /// + /// Sets author name of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String Author { get; set; } + + /// + /// Sets description of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String Description { get; set; } + + /// + /// Sets comment of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String Comment { get; set; } + + /// + /// Sets revision of the task group. + /// + [DataMember] + public Int32 Revision { get; set; } + + /// + /// Gets or sets parent task group Id. This is used while creating a draft task group. + /// + [DataMember(EmitDefaultValue = false)] + public Guid? ParentDefinitionId { get; set; } + + /// + /// Sets url icon of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String IconUrl { get; set; } + + /// + /// Sets display name of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String InstanceNameFormat { get; set; } + + /// + /// Sets category of the task group. + /// + [DataMember(EmitDefaultValue = false)] + public String Category { get; set; } + + /// + /// Sets version of the task group. 
+ /// + [DataMember(EmitDefaultValue = false)] + public TaskVersion Version { get; set; } + + public IList Tasks + { + get + { + if (m_tasks == null) + { + m_tasks = new List(); + } + + return m_tasks; + } + } + + public IList Inputs + { + get + { + if (m_inputs == null) + { + m_inputs = new List(); + } + return m_inputs; + } + } + + public IList RunsOn + { + get + { + if (m_runsOn == null) + { + m_runsOn = new List(TaskRunsOnConstants.DefaultValue); + } + + return m_runsOn; + } + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedRunsOn, ref m_runsOn, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_runsOn, ref m_serializedRunsOn); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedRunsOn = null; + } + + /// + /// Sets tasks for the task group. + /// + [DataMember(Name = "Tasks")] + private IList m_tasks; + + /// + /// Sets input for the task group. + /// + [DataMember(Name = "Inputs", EmitDefaultValue = false)] + private List m_inputs; + + /// + /// Sets RunsOn of the task group. Value can be 'Agent', 'Server' or 'DeploymentGroup'. + /// + [DataMember(Name = "RunsOn", EmitDefaultValue = false)] + private List m_serializedRunsOn; + + private List m_runsOn; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskHttpClient.cs b/src/Sdk/DTWebApi/WebApi/TaskHttpClient.cs new file mode 100644 index 00000000000..c0691ff6c33 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskHttpClient.cs @@ -0,0 +1,147 @@ +using GitHub.DistributedTask.Pipelines.Runtime; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; + +namespace GitHub.DistributedTask.WebApi +{ + public sealed class TaskHttpClient : TaskHttpClientBase + { + public TaskHttpClient( + Uri baseUrl, + VssCredentials credentials) + : base(baseUrl, credentials) + { + } + + public TaskHttpClient( + Uri baseUrl, + VssCredentials credentials, + VssHttpRequestSettings settings) + : base(baseUrl, credentials, settings) + { + } + + public TaskHttpClient( + Uri baseUrl, + VssCredentials credentials, + params DelegatingHandler[] handlers) + : base(baseUrl, credentials, handlers) + { + } + + public TaskHttpClient( + Uri baseUrl, + VssCredentials credentials, + VssHttpRequestSettings settings, + params DelegatingHandler[] handlers) + : base(baseUrl, credentials, settings, handlers) + { + } + + public TaskHttpClient( + Uri baseUrl, + HttpMessageHandler pipeline, + Boolean disposeHandler) + : base(baseUrl, pipeline, disposeHandler) + { + } + + public Task AppendTimelineRecordFeedAsync( + Guid scopeIdentifier, + String planType, + Guid planId, + Guid timelineId, + Guid recordId, + IEnumerable lines, + CancellationToken cancellationToken = default(CancellationToken), + Object userState = null) + { + return AppendTimelineRecordFeedAsync(scopeIdentifier, + planType, + planId, + timelineId, + recordId, + new TimelineRecordFeedLinesWrapper(Guid.Empty, lines.ToList()), + userState, + cancellationToken); + } + + public Task AppendTimelineRecordFeedAsync( + Guid scopeIdentifier, + String planType, + Guid planId, + Guid timelineId, + Guid recordId, + Guid stepId, + IList lines, + CancellationToken cancellationToken = default(CancellationToken), + Object userState = null) + { + return AppendTimelineRecordFeedAsync(scopeIdentifier, + 
planType, + planId, + timelineId, + recordId, + new TimelineRecordFeedLinesWrapper(stepId, lines), + userState, + cancellationToken); + } + + public async Task RaisePlanEventAsync( + Guid scopeIdentifier, + String planType, + Guid planId, + T eventData, + CancellationToken cancellationToken = default(CancellationToken), + Object userState = null) where T : JobEvent + { + var routeValues = new { scopeIdentifier = scopeIdentifier, hubName = planType, planId = planId }; + await base.PostAsync(eventData, + TaskResourceIds.PlanEvents, + routeValues, + version: m_currentApiVersion, + cancellationToken: cancellationToken, + userState: userState).ConfigureAwait(false); + } + + public Task> UpdateTimelineRecordsAsync( + Guid scopeIdentifier, + String planType, + Guid planId, + Guid timelineId, + IEnumerable records, + CancellationToken cancellationToken = default(CancellationToken), + Object userState = null) + { + return UpdateRecordsAsync(scopeIdentifier, + planType, + planId, + timelineId, + new VssJsonCollectionWrapper>(records), + userState, + cancellationToken); + } + + public Task GetAgentRequestJobAsync( + Guid scopeIdentifier, + String planType, + String orchestrationId, + CancellationToken cancellationToken = default(CancellationToken), + Object userState = null) + { + return base.GetJobInstanceAsync(scopeIdentifier, + planType, + orchestrationId, + userState, + cancellationToken); + } + + private readonly ApiResourceVersion m_currentApiVersion = new ApiResourceVersion(2.0, 1); + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskHubLicenseDetails.cs b/src/Sdk/DTWebApi/WebApi/TaskHubLicenseDetails.cs new file mode 100644 index 00000000000..33d63779316 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskHubLicenseDetails.cs @@ -0,0 +1,78 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class TaskHubLicenseDetails + { + public static readonly Int32 DefaultFreeLicenseCount = 0; + + [DataMember(Name = "FreeLicenseCount")] + public Int32 FreePrivateLicenseCount; + + [DataMember] + public Int32 FreeHostedLicenseCount; + + [DataMember] + public Int32 EnterpriseUsersCount; + + /// + /// Self-hosted licenses purchased from VSTS directly. + /// + [DataMember(Name = "PurchasedLicenseCount")] + public Int32 PurchasedPrivateLicenseCount; + + /// + /// Microsoft-hosted licenses purchased from VSTS directly. + /// + [DataMember] + public Int32 PurchasedHostedLicenseCount; + + [DataMember] + public Boolean HostedLicensesArePremium; + + /// + /// Microsoft-hosted licenses purchased from secondary marketplaces. 
+ /// + public List MarketplacePurchasedHostedLicenses + { + get + { + if (m_marketplacePurchasedHostedLicenses == null) + { + m_marketplacePurchasedHostedLicenses = new List(); + } + return m_marketplacePurchasedHostedLicenses; + } + } + + [DataMember] + public Int32 TotalLicenseCount; + + [DataMember] + public Boolean HasLicenseCountEverUpdated; + + [DataMember] + public Int32 MsdnUsersCount; + + [DataMember] + public Int32 HostedAgentMinutesFreeCount; + + [DataMember] + public Int32 HostedAgentMinutesUsedCount; + + [DataMember] + public Boolean FailedToReachAllProviders; + + [DataMember] + public Int32 TotalPrivateLicenseCount; + + [DataMember] + public Int32 TotalHostedLicenseCount; + + [DataMember(Name = "MarketplacePurchasedHostedLicenses", EmitDefaultValue = false)] + private List m_marketplacePurchasedHostedLicenses; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskInputDefinition.cs b/src/Sdk/DTWebApi/WebApi/TaskInputDefinition.cs new file mode 100644 index 00000000000..6d4c79cca2f --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskInputDefinition.cs @@ -0,0 +1,35 @@ +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using CommonContracts = GitHub.DistributedTask.Common.Contracts; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskInputDefinition : CommonContracts.TaskInputDefinitionBase + { + public TaskInputDefinition() + : base() + { + } + + private TaskInputDefinition(TaskInputDefinition inputDefinitionToClone) + : base(inputDefinitionToClone) + { + } + + private TaskInputDefinition(TaskInputDefinition inputDefinitionToClone, ISecuredObject securedObject) + : base(inputDefinitionToClone, securedObject) + { + } + + public TaskInputDefinition Clone() + { + return new TaskInputDefinition(this); + } + + public override CommonContracts.TaskInputDefinitionBase Clone(ISecuredObject securedObject) + { + return base.Clone(securedObject); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskInputType.cs b/src/Sdk/DTWebApi/WebApi/TaskInputType.cs new file mode 100644 index 00000000000..3d1227dd112 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskInputType.cs @@ -0,0 +1,15 @@ +using CommonContracts = GitHub.DistributedTask.Common.Contracts; +using System.Runtime.Serialization; +using System; + +namespace GitHub.DistributedTask.WebApi +{ + public static class TaskInputType + { + public const String String = CommonContracts.TaskInputType.String; + public const String Repository = CommonContracts.TaskInputType.Repository; + public const String Boolean = CommonContracts.TaskInputType.Boolean; + public const String KeyValue = CommonContracts.TaskInputType.KeyValue; + public const String FilePath = CommonContracts.TaskInputType.FilePath; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskInstance.cs b/src/Sdk/DTWebApi/WebApi/TaskInstance.cs new file mode 100644 index 00000000000..884e817487d --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskInstance.cs @@ -0,0 +1,130 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class TaskInstance : TaskReference + { + public TaskInstance() + { + // Enabled defaults to true + this.Enabled = true; + } + + private TaskInstance(TaskInstance taskToBeCloned) + : base(taskToBeCloned) + { + this.InstanceId = taskToBeCloned.InstanceId; + this.DisplayName = taskToBeCloned.DisplayName; + this.Enabled = taskToBeCloned.Enabled; + this.Condition = taskToBeCloned.Condition; + this.ContinueOnError = 
taskToBeCloned.ContinueOnError; + this.AlwaysRun = taskToBeCloned.AlwaysRun; + this.TimeoutInMinutes = taskToBeCloned.TimeoutInMinutes; + this.RefName = taskToBeCloned.RefName; + + if (taskToBeCloned.m_environment != null) + { + m_environment = new Dictionary(taskToBeCloned.m_environment, StringComparer.Ordinal); + } + } + + [DataMember] + public Guid InstanceId + { + get; + set; + } + + [DataMember] + public String DisplayName + { + get; + set; + } + + [DataMember] + public Boolean Enabled + { + get; + set; + } + + [DataMember] + public String Condition + { + get; + set; + } + + [DataMember] + public Boolean ContinueOnError + { + get; + set; + } + + [DataMember] + public Boolean AlwaysRun + { + get; + set; + } + + [DataMember] + public int TimeoutInMinutes + { + get; + set; + } + + [DataMember] + public String RefName + { + get; + set; + } + + public IDictionary Environment + { + get + { + if (m_environment == null) + { + m_environment = new Dictionary(StringComparer.Ordinal); + } + return m_environment; + } + } + + public override TaskReference Clone() + { + return new TaskInstance(this); + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedEnvironment, ref m_environment, StringComparer.Ordinal, true); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_environment, ref m_serializedEnvironment); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedEnvironment = null; + } + + private IDictionary m_environment; + + [DataMember(EmitDefaultValue = false, Name = "Environment")] + private IDictionary m_serializedEnvironment; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskLog.cs b/src/Sdk/DTWebApi/WebApi/TaskLog.cs new file mode 100644 index 00000000000..aad479097aa --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskLog.cs @@ -0,0 +1,55 @@ +using GitHub.Services.Common; +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class TaskLog : TaskLogReference + { + internal TaskLog() + { + } + + public TaskLog(String path) + { + ArgumentUtility.CheckStringForNullOrEmpty(path, "path"); + this.Path = path; + } + + [DataMember(EmitDefaultValue = false)] + public Uri IndexLocation + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Path + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Int64 LineCount + { + get; + set; + } + + [DataMember] + public DateTime CreatedOn + { + get; + internal set; + } + + [DataMember] + public DateTime LastChangedOn + { + get; + internal set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskLogReference.cs b/src/Sdk/DTWebApi/WebApi/TaskLogReference.cs new file mode 100644 index 00000000000..b1e65d3be1a --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskLogReference.cs @@ -0,0 +1,23 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskLogReference + { + [DataMember] + public Int32 Id + { + get; + set; + } + + [DataMember] + public Uri Location + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOrchestrationContainer.cs b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationContainer.cs new file mode 100644 index 00000000000..ed97476e121 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationContainer.cs @@ -0,0 +1,116 @@ +using System; +using System.Collections.Generic; 
+using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class TaskOrchestrationContainer : TaskOrchestrationItem, IOrchestrationProcess + { + public TaskOrchestrationContainer() + : base(TaskOrchestrationItemType.Container) + { + ContinueOnError = true; + MaxConcurrency = Int32.MaxValue; + } + + [DataMember(EmitDefaultValue = false)] + public Boolean Parallel + { + get; + set; + } + + public List Children + { + get + { + if (m_children == null) + { + m_children = new List(); + } + + return m_children; + } + } + + [DataMember(EmitDefaultValue = false)] + public TaskOrchestrationContainer Rollback + { + get; + set; + } + + [DataMember(EmitDefaultValue = true)] + public Boolean ContinueOnError + { + get; + set; + } + + [DataMember(EmitDefaultValue = true)] + public Int32 MaxConcurrency + { + get; + set; + } + + /// + /// Get additional specifications for this container. + /// + /// + /// This provides an extensibility for consumers of DT SDK to pass additional data + /// to Orchestrations. Each Orchestration is free to interpret this data as appropriate. + /// + public IDictionary Data + { + get + { + if (m_data == null) + { + m_data = new Dictionary(); + } + + return m_data; + } + } + + OrchestrationProcessType IOrchestrationProcess.ProcessType + { + get + { + return OrchestrationProcessType.Container; + } + } + + public IEnumerable GetJobs() + { + var containerQueue = new Queue(); + containerQueue.Enqueue(this); + + while (containerQueue.Count > 0) + { + var currentContainer = containerQueue.Dequeue(); + foreach (var item in currentContainer.Children) + { + switch (item.ItemType) + { + case TaskOrchestrationItemType.Container: + containerQueue.Enqueue((TaskOrchestrationContainer)item); + break; + + case TaskOrchestrationItemType.Job: + yield return item as TaskOrchestrationJob; + break; + } + } + } + } + + [DataMember(Name = "Children")] + private List m_children; + + [DataMember(Name = "Data", EmitDefaultValue = false)] + private IDictionary m_data; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOrchestrationItem.cs b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationItem.cs new file mode 100644 index 00000000000..d271834acd7 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationItem.cs @@ -0,0 +1,24 @@ +using Newtonsoft.Json; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + [KnownType(typeof(TaskOrchestrationContainer))] + [KnownType(typeof(TaskOrchestrationJob))] + [JsonConverter(typeof(TaskOrchestrationItemJsonConverter))] + public abstract class TaskOrchestrationItem + { + protected TaskOrchestrationItem(TaskOrchestrationItemType itemType) + { + this.ItemType = itemType; + } + + [DataMember] + public TaskOrchestrationItemType ItemType + { + get; + private set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOrchestrationItemJsonConverter.cs b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationItemJsonConverter.cs new file mode 100644 index 00000000000..f8ead84157c --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationItemJsonConverter.cs @@ -0,0 +1,100 @@ +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json.Serialization; +using System; +using System.Reflection; + +namespace GitHub.DistributedTask.WebApi +{ + internal sealed class TaskOrchestrationItemJsonConverter : VssSecureJsonConverter + { + public override Boolean CanConvert(Type objectType) + { + return 
typeof(TaskOrchestrationItem).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + var contract = serializer.ContractResolver.ResolveContract(objectType) as JsonObjectContract; + if (contract == null) + { + return existingValue; + } + + JsonProperty property = contract.Properties.GetClosestMatchProperty("ItemType"); + if (property == null) + { + return existingValue; + } + + JToken itemTypeValue; + TaskOrchestrationItemType itemType; + JObject value = JObject.Load(reader); + if (!value.TryGetValue(property.PropertyName, out itemTypeValue)) + { + return existingValue; + } + else + { + if (itemTypeValue.Type == JTokenType.Integer) + { + itemType = (TaskOrchestrationItemType)(Int32)itemTypeValue; + } + else if (itemTypeValue.Type != JTokenType.String || + !Enum.TryParse((String)itemTypeValue, true, out itemType)) + { + return existingValue; + } + } + + Object newValue = null; + switch (itemType) + { + case TaskOrchestrationItemType.Container: + newValue = new TaskOrchestrationContainer(); + break; + + case TaskOrchestrationItemType.Job: + newValue = new TaskOrchestrationJob(); + break; + } + + if (value != null) + { + using (JsonReader objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, newValue); + } + } + + return newValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + // The virtual method returns false for CanWrite so this should never be invoked + throw new NotSupportedException(); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOrchestrationItemType.cs b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationItemType.cs new file mode 100644 index 00000000000..51755c3a505 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationItemType.cs @@ -0,0 +1,14 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public enum TaskOrchestrationItemType + { + [EnumMember] + Container, + + [EnumMember] + Job, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOrchestrationJob.cs b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationJob.cs new file mode 100644 index 00000000000..fc67872155f --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationJob.cs @@ -0,0 +1,166 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class TaskOrchestrationJob : TaskOrchestrationItem + { + internal TaskOrchestrationJob() + : base(TaskOrchestrationItemType.Job) + { + this.ExecutionMode = JobExecutionModeTypes.Agent; + } + + public TaskOrchestrationJob( + Guid instanceId, + String name, + String refName, + string executionMode = JobExecutionModeTypes.Agent) + : base(TaskOrchestrationItemType.Job) + { + this.InstanceId = instanceId; + this.Name = name; + this.RefName = refName; + this.ExecutionMode = executionMode; + } + + private TaskOrchestrationJob(TaskOrchestrationJob jobToBeCloned) + : base(jobToBeCloned.ItemType) + { + this.InstanceId = jobToBeCloned.InstanceId; + this.Name = jobToBeCloned.Name; + this.RefName = jobToBeCloned.RefName; + this.ExecutionMode = jobToBeCloned.ExecutionMode; + this.ExecutionTimeout = 
jobToBeCloned.ExecutionTimeout; + + if (jobToBeCloned.ExecuteAs != null) + { + this.ExecuteAs = new IdentityRef + { + DisplayName = jobToBeCloned.ExecuteAs.DisplayName, + Id = jobToBeCloned.ExecuteAs.Id, + ImageUrl = jobToBeCloned.ExecuteAs.ImageUrl, + IsAadIdentity = jobToBeCloned.ExecuteAs.IsAadIdentity, + IsContainer = jobToBeCloned.ExecuteAs.IsContainer, + ProfileUrl = jobToBeCloned.ExecuteAs.ProfileUrl, + UniqueName = jobToBeCloned.ExecuteAs.UniqueName, + Url = jobToBeCloned.ExecuteAs.Url, + }; + } + + if (jobToBeCloned.m_demands != null) + { + m_demands = jobToBeCloned.Demands.Select(x => x.Clone()).ToList(); + } + + if (jobToBeCloned.m_variables != null) + { + m_variables = new Dictionary(jobToBeCloned.m_variables, StringComparer.OrdinalIgnoreCase); + } + + if (jobToBeCloned.m_tasks != null) + { + m_tasks = jobToBeCloned.m_tasks.Select(x => (TaskInstance)x.Clone()).ToList(); + } + } + + [DataMember] + public Guid InstanceId + { + get; + set; + } + + [DataMember] + public String Name + { + get; + set; + } + + [DataMember] + public String RefName + { + get; + set; + } + + [DataMember] + public string ExecutionMode + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public IdentityRef ExecuteAs + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TimeSpan? ExecutionTimeout + { + get; + set; + } + + public List Demands + { + get + { + if (m_demands == null) + { + m_demands = new List(); + } + + return m_demands; + } + } + + public IDictionary Variables + { + get + { + if (m_variables == null) + { + m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_variables; + } + } + + public List Tasks + { + get + { + if (m_tasks == null) + { + m_tasks = new List(); + } + + return m_tasks; + } + } + + public TaskOrchestrationJob Clone() + { + return new TaskOrchestrationJob(this); + } + + [DataMember(Name = "Demands", EmitDefaultValue = false)] + private List m_demands; + + [DataMember(Name = "Variables", EmitDefaultValue = false)] + private IDictionary m_variables; + + [DataMember(Name = "Tasks", EmitDefaultValue = false)] + private List m_tasks; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOrchestrationOwner.cs b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationOwner.cs new file mode 100644 index 00000000000..ef6bde45114 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationOwner.cs @@ -0,0 +1,60 @@ +using GitHub.Services.WebApi; +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskOrchestrationOwner : ICloneable + { + public TaskOrchestrationOwner() + { + } + + private TaskOrchestrationOwner(TaskOrchestrationOwner ownerToBeCloned) + { + this.Id = ownerToBeCloned.Id; + this.Name = ownerToBeCloned.Name; + this.m_links = ownerToBeCloned.Links.Clone(); + } + + [DataMember] + public Int32 Id + { + get; + set; + } + + [DataMember] + public String Name + { + get; + set; + } + + public ReferenceLinks Links + { + get + { + if (m_links == null) + { + m_links = new ReferenceLinks(); + } + return m_links; + } + } + + public TaskOrchestrationOwner Clone() + { + return new TaskOrchestrationOwner(this); + } + + Object ICloneable.Clone() + { + return this.Clone(); + } + + [DataMember(Name = "_links")] + private ReferenceLinks m_links; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlan.cs b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlan.cs new file mode 100644 index 00000000000..11e1cd18f9b --- /dev/null +++ 
b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlan.cs @@ -0,0 +1,145 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskOrchestrationPlan : TaskOrchestrationPlanReference + { + [DataMember(EmitDefaultValue = false)] + public DateTime? StartTime + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public DateTime? FinishTime + { + get; + set; + } + + [DataMember] + public TaskOrchestrationPlanState State + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TaskResult? Result + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String ResultCode + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TimelineReference Timeline + { + get; + set; + } + + public PlanEnvironment Environment + { + get + { + return m_environment; + } + set + { + m_environment = value; + m_processEnvironment = value; + } + } + + public TaskOrchestrationContainer Implementation + { + get + { + return m_implementation; + } + set + { + m_process = value; + m_implementation = value; + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public IOrchestrationProcess Process + { + get + { + return m_process; + } + set + { + m_process = value; + m_implementation = value as TaskOrchestrationContainer; + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public IOrchestrationEnvironment ProcessEnvironment + { + get + { + return m_processEnvironment; + } + set + { + m_processEnvironment = value; + m_environment = value as PlanEnvironment; + } + } + + [DataMember(EmitDefaultValue = false)] + public Guid RequestedById + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Guid RequestedForId + { + get; + set; + } + + internal PlanTemplateType TemplateType + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public TaskLogReference InitializationLog + { + get; + set; + } + + // Currently these members are not serialized on the wire since that would technically be an API break for + // the 1.0 version. While additive, existing clients wouldn't understand it and could blow up. Until this + // public model is finalized we will not send this data over the wire and will not revision the API. 
+ private IOrchestrationProcess m_process; + private IOrchestrationEnvironment m_processEnvironment; + + [DataMember(Name = "Environment", EmitDefaultValue = false)] + private PlanEnvironment m_environment; + + [DataMember(Name = "Implementation", EmitDefaultValue = false)] + private TaskOrchestrationContainer m_implementation; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlanGroupsQueueMetrics.cs b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlanGroupsQueueMetrics.cs new file mode 100644 index 00000000000..ddd426e8dab --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlanGroupsQueueMetrics.cs @@ -0,0 +1,23 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskOrchestrationPlanGroupsQueueMetrics + { + [DataMember] + public PlanGroupStatus Status + { + get; + set; + } + + [DataMember] + public Int32 Count + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlanReference.cs b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlanReference.cs new file mode 100644 index 00000000000..8955803167a --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlanReference.cs @@ -0,0 +1,86 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskOrchestrationPlanReference + { + [DataMember] + public Guid ScopeIdentifier + { + get; + set; + } + + [DataMember] + public String PlanType + { + get; + set; + } + + [DataMember] + public Int32 Version + { + get; + set; + } + + [DataMember] + public Guid PlanId + { + get; + set; + } + + [DataMember] + public String PlanGroup + { + get; + set; + } + + [DataMember] + public Uri ArtifactUri + { + get; + set; + } + + [DataMember] + public Uri ArtifactLocation + { + get; + set; + } + + [IgnoreDataMember] + internal Int64 ContainerId + { + get; + set; + } + + [IgnoreDataMember] + public OrchestrationProcessType ProcessType + { + get; + internal set; + } + + [DataMember] + public TaskOrchestrationOwner Definition + { + get; + set; + } + + [DataMember] + public TaskOrchestrationOwner Owner + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlanState.cs b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlanState.cs new file mode 100644 index 00000000000..8fc0b654ab9 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationPlanState.cs @@ -0,0 +1,20 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public enum TaskOrchestrationPlanState + { + [EnumMember] + InProgress = 1, + + [EnumMember] + Queued = 2, + + [EnumMember] + Completed = 4, + + [EnumMember] + Throttled = 8 + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOrchestrationQueuedPlan.cs b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationQueuedPlan.cs new file mode 100644 index 00000000000..ee2ffeef231 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationQueuedPlan.cs @@ -0,0 +1,72 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskOrchestrationQueuedPlan + { + [DataMember] + public Guid PlanId + { + get; + set; + } + + [DataMember] + public Guid ScopeIdentifier + { + get; + set; + } + + [DataMember] + public String PlanGroup + { + get; + set; + } + + [DataMember] + public Int32 QueuePosition + { + get; + set; + } + + [DataMember] + public Int32 PoolId + { + get; + set; + } + + [DataMember] + public DateTime QueueTime + { + get; + set; + } 
+ + [DataMember] + public DateTime? AssignTime + { + get; + set; + } + + [DataMember] + public TaskOrchestrationOwner Definition + { + get; + set; + } + + [DataMember] + public TaskOrchestrationOwner Owner + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOrchestrationQueuedPlanGroup.cs b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationQueuedPlanGroup.cs new file mode 100644 index 00000000000..fd5b57576c0 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskOrchestrationQueuedPlanGroup.cs @@ -0,0 +1,61 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskOrchestrationQueuedPlanGroup + { + [DataMember] + public ProjectReference Project + { + get; + internal set; + } + + [DataMember] + public String PlanGroup + { + get; + internal set; + } + + [DataMember] + public Int32 QueuePosition + { + get; + internal set; + } + + [DataMember] + public List Plans + { + get + { + if (this._plans == null) + { + this._plans = new List(); + } + + return this._plans; + } + } + + [DataMember] + public TaskOrchestrationOwner Definition + { + get; + set; + } + + [DataMember] + public TaskOrchestrationOwner Owner + { + get; + set; + } + + private List _plans; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskOutputVariable.cs b/src/Sdk/DTWebApi/WebApi/TaskOutputVariable.cs new file mode 100644 index 00000000000..e2ba44d2c28 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskOutputVariable.cs @@ -0,0 +1,38 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskOutputVariable + { + public TaskOutputVariable() + { + } + + private TaskOutputVariable(TaskOutputVariable outputDefinitionToClone) + { + this.Name = outputDefinitionToClone.Name; + this.Description = outputDefinitionToClone.Description; + } + + [DataMember] + public String Name + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + public TaskOutputVariable Clone() + { + return new TaskOutputVariable(this); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskReference.cs b/src/Sdk/DTWebApi/WebApi/TaskReference.cs new file mode 100644 index 00000000000..5dfbdc3c97c --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskReference.cs @@ -0,0 +1,87 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskReference : ITaskDefinitionReference + { + public TaskReference() + { + } + + protected TaskReference(TaskReference taskToBeCloned) + { + this.Id = taskToBeCloned.Id; + this.Name = taskToBeCloned.Name; + this.Version = taskToBeCloned.Version; + + if (taskToBeCloned.m_inputs != null) + { + m_inputs = new Dictionary(taskToBeCloned.m_inputs, StringComparer.OrdinalIgnoreCase); + } + } + + [DataMember] + public Guid Id + { + get; + set; + } + + [DataMember] + public String Name + { + get; + set; + } + + [DataMember] + public String Version + { + get; + set; + } + + public IDictionary Inputs + { + get + { + if (m_inputs == null) + { + m_inputs = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + return m_inputs; + } + } + + public virtual TaskReference Clone() + { + return new TaskReference(this); + } + + [OnDeserialized] + private void OnDeserialized(StreamingContext context) + { + SerializationHelper.Copy(ref m_serializedInputs, ref m_inputs, StringComparer.OrdinalIgnoreCase, true); + } + + 
[OnSerializing] + private void OnSerializing(StreamingContext context) + { + SerializationHelper.Copy(ref m_inputs, ref m_serializedInputs); + } + + [OnSerialized] + private void OnSerialized(StreamingContext context) + { + m_serializedInputs = null; + } + + private IDictionary m_inputs; + + [DataMember(EmitDefaultValue = false, Name = "Inputs")] + private IDictionary m_serializedInputs; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskResourceIds.cs b/src/Sdk/DTWebApi/WebApi/TaskResourceIds.cs new file mode 100644 index 00000000000..24435a807d0 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskResourceIds.cs @@ -0,0 +1,264 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.WebApi +{ + [GenerateAllConstants] + public static class TaskResourceIds + { + public const String AreaId = "A85B8835-C1A1-4AAC-AE97-1C3D0BA72DBD"; + public const String AreaName = "distributedtask"; + + public static readonly Guid Agents = new Guid("{E298EF32-5878-4CAB-993C-043836571F42}"); + public const String AgentsResource = "agents"; + + public static readonly Guid AgentMessages = new Guid("{C3A054F6-7A8A-49C0-944E-3A8E5D7ADFD7}"); + public const String AgentMessagesResource = "messages"; + + public static readonly Guid AgentSessions = new Guid("{134E239E-2DF3-4794-A6F6-24F1F19EC8DC}"); + public const String AgentSessionsResource = "sessions"; + + public static readonly Guid AgentUpdates = new Guid("{8CC1B02B-AE49-4516-B5AD-4F9B29967C30}"); + public const String AgentUpdatesResource = "updates"; + + public static readonly Guid UserCapabilities = new Guid("{30BA3ADA-FEDF-4DA8-BBB5-DACF2F82E176}"); + public const String UserCapabilitiesResource = "usercapabilities"; + + public static readonly Guid AgentClouds = new Guid("{BFA72B3D-0FC6-43FB-932B-A7F6559F93B9}"); + public const String AgentCloudsResource = "agentclouds"; + + public static readonly Guid AgentCloudRequests = new Guid("{20189BD7-5134-49C2-B8E9-F9E856EEA2B2}"); + public const String AgentCloudRequestsResource = "requests"; + + public static readonly Guid AgentCloudRequestMessages = new Guid("{BD247656-4D13-49AF-80C1-1891BB057A93}"); + public const String AgentCloudRequestMessagesResource = "agentCloudRequestMessages"; + + public static readonly Guid AgentCloudRequestJob = new Guid("{662C9827-FEED-40F0-AE63-B0B8E88A58B8}"); + public const String AgentCloudRequestJobResource = "agentCloudRequestJob"; + + public static readonly Guid Packages = new Guid("{8FFCD551-079C-493A-9C02-54346299D144}"); + public const String PackagesResource = "packages"; + + public static readonly Guid AgentDownload = new Guid("{314EA24F-8331-4AF1-9FB6-CFC73A4CB5A8}"); + public const String AgentDownloadResource = "downloads"; + + public static readonly Guid Pools = new Guid("{A8C47E17-4D56-4A56-92BB-DE7EA7DC65BE}"); + public const String PoolsResource = "pools"; + + public static readonly Guid AgentCloudTypes = new Guid("{5932E193-F376-469D-9C3E-E5588CE12CB5}"); + public const String AgentCloudTypesResource = "agentcloudtypes"; + + public const String DeploymentPoolsResource = "deploymentPools"; + + public static readonly Guid DeploymentPoolsSummary = new Guid("{6525D6C6-258F-40E0-A1A9-8A24A3957625}"); + public const String DeploymentPoolsSummaryResource = "deploymentPoolsSummary"; + + public static readonly Guid PoolMaintenanceDefinitions = new Guid("{80572E16-58F0-4419-AC07-D19FDE32195C}"); + public const String PoolMaintenanceDefinitionsResource = "maintenancedefinitions"; + + public static readonly Guid PoolMaintenanceJobs = new 
Guid("{15E7AB6E-ABCE-4601-A6D8-E111FE148F46}"); + public const String PoolMaintenanceJobsResource = "maintenancejobs"; + + public static readonly Guid Queues = new Guid("900FA995-C559-4923-AAE7-F8424FE4FBEA"); + public const String QueuesResource = "queues"; + + public static readonly Guid DeploymentGroupAccessToken = new Guid("3D197BA2-C3E9-4253-882F-0EE2440F8174"); + public const String DeploymentGroupAccessTokenResource = "deploymentgroupaccesstoken"; + + public static readonly Guid DeploymentPoolAccessToken = new Guid("E077EE4A-399B-420B-841F-C43FBC058E0B"); + public const String DeploymentPoolAccessTokenResource = "deploymentpoolaccesstoken"; + + public const string DeploymentGroupsMetricsLocationIdString = "281C6308-427A-49E1-B83A-DAC0F4862189"; + public static readonly Guid DeploymentGroupsMetrics = new Guid(DeploymentGroupsMetricsLocationIdString); + public const String DeploymentGroupsMetricsResource = "deploymentgroupsmetrics"; + + public static readonly Guid DeploymentGroups = new Guid("083C4D89-AB35-45AF-AA11-7CF66895C53E"); + public const String DeploymentGroupsResource = "deploymentgroups"; + + public static readonly Guid DeploymentMachineGroups = new Guid("D4ADF50F-80C6-4AC8-9CA1-6E4E544286E9"); + public const String DeploymentMachineGroupsResource = "machinegroups"; + + public const string DeploymentMachinesLocationIdString = "6F6D406F-CFE6-409C-9327-7009928077E7"; + public static readonly Guid DeploymentMachines = new Guid(DeploymentMachinesLocationIdString); + public const string DeploymentMachineGroupMachinesLocationIdString = "966C3874-C347-4B18-A90C-D509116717FD"; + public static readonly Guid DeploymentMachineGroupMachines = new Guid(DeploymentMachineGroupMachinesLocationIdString); + public const String DeploymentMachinesResource = "machines"; + + public const string DeploymentTargetsLocationIdString = "2F0AA599-C121-4256-A5FD-BA370E0AE7B6"; + public static readonly Guid DeploymentTargets = new Guid(DeploymentTargetsLocationIdString); + public const String DeploymentTargetsResource = "targets"; + + public static readonly Guid DeploymentMachineGroupAccessToken = new Guid("F8C7C0DE-AC0D-469B-9CB1-C21F72D67693"); + public const String DeploymentMachineGroupAccessTokenResource = "machinegroupaccesstoken"; + + public static readonly Guid PoolRolesCompat = new Guid("{9E627AF6-3635-4DDF-A275-DCA904802338}"); + public const String PoolRolesCompatResource = "roles"; + + public static readonly Guid QueueRoles = new Guid("{B0C6D64D-C9FA-4946-B8DE-77DE623EE585}"); + public const String QueueRolesResource = "queueroles"; + + public static readonly Guid PoolRoles = new Guid("{381DD2BB-35CF-4103-AE8C-3C815B25763C}"); + public const string PoolRolesResource = "poolroles"; + + public static readonly Guid PoolMetadata = new Guid("{0D62F887-9F53-48B9-9161-4C35D5735B0F}"); + public const string PoolMetadataResource = "poolmetadata"; + + public static readonly Guid JobRequestsDeprecated = new Guid("{FC825784-C92A-4299-9221-998A02D1B54F}"); + public const String JobRequestsDeprecatedResource = "jobrequests"; + + public static readonly Guid AgentRequests = new Guid("{F5F81FFB-F396-498D-85B1-5ADA145E648A}"); + public const String AgentRequestsResource = "agentrequests"; + + public static readonly Guid DeploymentMachineJobRequests = new Guid("{A3540E5B-F0DC-4668-963B-B752459BE545}"); + public const String DeploymentMachineJobRequestsResource = "deploymentmachinejobrequests"; + + public static readonly Guid DeploymentTargetJobRequests = new Guid("{2FAC0BE3-8C8F-4473-AB93-C1389B08A2C9}"); + 
public const String DeploymentTargetJobRequestsResource = "deploymentTargetJobRequests"; + + public static readonly Guid DeploymentMachineMessages = new Guid("{91006AC4-0F68-4D82-A2BC-540676BD73CE}"); + public const String DeploymentMachineMessagesResource = "deploymentmachinemessages"; + + public static readonly Guid DeploymentTargetMessages = new Guid("{1C1A817F-F23D-41C6-BF8D-14B638F64152}"); + public const String DeploymentTargetMessagesResource = "deploymentTargetMessages"; + + public static readonly Guid Tasks = new Guid("{60AAC929-F0CD-4BC8-9CE4-6B30E8F1B1BD}"); + public const String TasksResource = "tasks"; + + public static readonly Guid TaskEndpoint = new Guid("{F223B809-8C33-4B7D-B53F-07232569B5D6}"); + public const String TaskEndpointResource = "endpoint"; + + public static readonly Guid TaskIcons = new Guid("{63463108-174D-49D4-B8CB-235EEA42A5E1}"); + public const String TaskIconsResource = "icon"; + + public static readonly Guid Logs = new Guid("{46F5667D-263A-4684-91B1-DFF7FDCF64E2}"); + public static readonly Guid Logs_Compat = new Guid("{15344176-9E77-4CF4-A7C3-8BC4D0A3C4EB}"); + public const String LogsResource = "logs"; + + public static readonly Guid Plans = new Guid("{5CECD946-D704-471E-A45F-3B4064FCFABA}"); + public static readonly Guid Plans_Compat = new Guid("{F8D10759-6E90-48BC-96B0-D19440116797}"); + public const String PlansResource = "plans"; + + public static readonly Guid JobInstances = new Guid("{0A1EFD25-ABDA-43BD-9629-6C7BDD2E0D60}"); + public const String JobInstancesResource = "jobinstances"; + + public static readonly Guid PlanEvents = new Guid("{557624AF-B29E-4C20-8AB0-0399D2204F3F}"); + public static readonly Guid PlanEvents_Compat = new Guid("{DFED02FB-DEEE-4039-A04D-AA21D0241995}"); + public const String PlanEventsResource = "events"; + + public const String PlanAttachmentsLocationIdString = "EB55E5D6-2F30-4295-B5ED-38DA50B1FC52"; + public static readonly Guid PlanAttachments = new Guid(PlanAttachmentsLocationIdString); + public const String AttachmentsLocationIdString = "7898F959-9CDF-4096-B29E-7F293031629E"; + public static readonly Guid Attachments = new Guid(AttachmentsLocationIdString); + public const String AttachmentsResource = "attachments"; + + public static readonly Guid Timelines = new Guid("{83597576-CC2C-453C-BEA6-2882AE6A1653}"); + public static readonly Guid Timelines_Compat = new Guid("{FFE38397-3A9D-4CA6-B06D-49303F287BA5}"); + public const String TimelinesResource = "timelines"; + + public static readonly Guid TimelineRecords = new Guid("{8893BC5B-35B2-4BE7-83CB-99E683551DB4}"); + public static readonly Guid TimelineRecords_Compat = new Guid("{50170D5D-F122-492F-9816-E2EF9F8D1756}"); + public const String TimelineRecordsResource = "records"; + + public static readonly Guid TimelineRecordFeeds = new Guid("{858983E4-19BD-4C5E-864C-507B59B58B12}"); + public static readonly Guid TimelineRecordFeeds_Compat = new Guid("{9AE056F6-D4E4-4D0C-BD26-AEE2A22F01F2}"); + public const String TimelineRecordFeedsResource = "feed"; + + public static readonly Guid ServiceEndpoints = new Guid("CA373C13-FEC3-4B30-9525-35A117731384"); + public const String ServiceEndpoints2LocationIdString = "DCA61D2F-3444-410A-B5EC-DB2FC4EFB4C5"; + public static readonly Guid ServiceEndpoints2 = new Guid(ServiceEndpoints2LocationIdString); + public const String ServiceEndpointsResource = "serviceendpoints"; + + public static readonly Guid ServiceEndpointTypes = new Guid("7c74af83-8605-45c1-a30b-7a05d5d7f8c1"); + public const String ServiceEndpointTypesResource = 
"serviceendpointtypes"; + + public static readonly Guid ServiceEndpointProxy = new Guid("e3a44534-7b94-4add-a053-8af449589c62"); + public const String ServiceEndpointProxy2LocationIdString = "F956A7DE-D766-43AF-81B1-E9E349245634"; + public static readonly Guid ServiceEndpointProxy2 = new Guid(ServiceEndpointProxy2LocationIdString); + public const String ServiceEndpointProxyResource = "serviceendpointproxy"; + + public static readonly Guid AzureSubscriptions = new Guid("BCD6189C-0303-471F-A8E1-ACB22B74D700"); + public const String AzureRmSubscriptionsResource = "azurermsubscriptions"; + + public static readonly Guid AzureManagementGroups = new Guid("39FE3BF2-7EE0-4198-A469-4A29929AFA9C"); + public const String AzureRmManagementGroupsResource = "azurermmanagementgroups"; + + public static readonly Guid TaskGroups = new Guid("6c08ffbf-dbf1-4f9a-94e5-a1cbd47005e7"); + public const string TaskGroupsResource = "taskgroups"; + + public static readonly Guid TaskGroupHistory = new Guid("100cc92a-b255-47fa-9ab3-e44a2985a3ac"); + public const string TaskGroupHistoryResource = "revisions"; + + public static readonly Guid ExtensionEvents = new Guid("{96c86d26-36fb-4649-9215-36e03a8bbc7d}"); + public const String ExtensionEventsResource = "extensionevents"; + public const String ExtensionPreInstallResource = "preinstall"; + + public static readonly Guid TaskHubLicense = new Guid("{F9F0F436-B8A1-4475-9041-1CCDBF8F0128}"); + public const String TaskHubLicenseResource = "hublicense"; + + public const String ResourceLimitsLocationIdString = "1F1F0557-C445-42A6-B4A0-0DF605A3A0F8"; + public static readonly Guid ResourceLimits = new Guid(ResourceLimitsLocationIdString); + public const String ResourceLimitsResource = "resourcelimits"; + + public const String ResourceUsageLocationIdString = "EAE1D376-A8B1-4475-9041-1DFDBE8F0143"; + public static readonly Guid ResourceUsage = new Guid(ResourceUsageLocationIdString); + public const String ResourceUsageResource = "resourceusage"; + + public static readonly Guid VariableGroups = new Guid("F5B09DD5-9D54-45A1-8B5A-1C8287D634CC"); + public const String VariableGroupsResource = "variablegroups"; + + public static readonly Guid VariableGroupsShare = new Guid("74455598-DEF7-499A-B7A3-A41D1C8225F8"); + public const String VariableGroupsShareResource = "variablegroupshare"; + + public static readonly Guid SecureFiles = new Guid("ADCFD8BC-B184-43BA-BD84-7C8C6A2FF421"); + public const String SecureFilesResource = "securefiles"; + + public const String PlanGroupsQueueLocationIdString = "0DD73091-3E36-4F43-B443-1B76DD426D84"; + public static readonly Guid PlanGroupsQueue = new Guid(PlanGroupsQueueLocationIdString); + public const String QueuedPlanGroupLocationIdString = "65FD0708-BC1E-447B-A731-0587C5464E5B"; + public static readonly Guid QueuedPlanGroup = new Guid(QueuedPlanGroupLocationIdString); + public const String PlanGroupsQueueResource = "plangroupsqueue"; + + public const String PlanGroupsQueueMetricsLocationIdString = "038FD4D5-CDA7-44CA-92C0-935843FEE1A7"; + public static readonly Guid PlanGroupsQueueMetrics = new Guid(PlanGroupsQueueMetricsLocationIdString); + public const String PlanGroupsQueueMetricsResource = "metrics"; + + public static readonly Guid VstsAadOAuth = new Guid("9C63205E-3A0F-42A0-AD88-095200F13607"); + public const string VstsAadOAuthResource = "vstsaadoauth"; + + public static readonly Guid InputValidation = new Guid("58475b1e-adaf-4155-9bc1-e04bf1fff4c2"); + public const string InputValidationResource = "inputvalidation"; + + public const string 
GetServiceEndpointExecutionHistoryLocationIdString = "3AD71E20-7586-45F9-A6C8-0342E00835AC"; + public static readonly Guid GetServiceEndpointExecutionHistory = new Guid(GetServiceEndpointExecutionHistoryLocationIdString); + public const string PostServiceEndpointExecutionHistoryLocationIdString = "11A45C69-2CCE-4ADE-A361-C9F5A37239EE"; + public static readonly Guid PostServiceEndpointExecutionHistory = new Guid(PostServiceEndpointExecutionHistoryLocationIdString); + + public const string ServiceEndpointExecutionHistoryResource = "executionhistory"; + + public static readonly Guid Environments = new Guid("8572B1FC-2482-47FA-8F74-7E3ED53EE54B"); + public const String EnvironmentsResource = "environments"; + + public static readonly Guid EnvironmentDeploymentExecutionHistory = new Guid("51bb5d21-4305-4ea6-9dbb-b7488af73334"); + public const String EnvironmentDeploymentExecutionHistoryResource = "environmentdeploymentRecords"; + + public const String KubernetesResourcesLocationIdString = "73FBA52F-15AB-42B3-A538-CE67A9223A04"; + public static readonly Guid KubernetesResourcesLocationId = new Guid(KubernetesResourcesLocationIdString); + public const String KubernetesResourcesResource = "kubernetes"; + + public const String VirtualMachineGroupsLocationIdString = "9e597901-4af7-4cc3-8d92-47d54db8ebfb"; + public static readonly Guid VirtualMachineGroupsLocationId = new Guid(VirtualMachineGroupsLocationIdString); + public const String VirtualMachineGroupsResource = "virtualmachinegroups"; + + public const String VirtualMachinesLocationIdString = "48700676-2BA5-4282-8EC8-083280D169C7"; + public static readonly Guid VirtualMachinesLocationId = new Guid(VirtualMachinesLocationIdString); + public const String VirtualMachinesResource = "virtualmachines"; + + public static readonly Guid YamlSchema = new Guid("{1F9990B9-1DBA-441F-9C2E-6485888C42B6}"); + public const String YamlSchemaResource = "yamlschema"; + + public const String CheckpointResourcesLocationIdString = "57835CC4-6FF0-4D62-8C27-4541BA97A094"; + public static readonly Guid CheckpointResourcesLocationId = new Guid(CheckpointResourcesLocationIdString); + public const String CheckpointResourcesResource = "references"; + + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskResult.cs b/src/Sdk/DTWebApi/WebApi/TaskResult.cs new file mode 100644 index 00000000000..c6367a95370 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskResult.cs @@ -0,0 +1,26 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public enum TaskResult + { + [EnumMember] + Succeeded = 0, + + [EnumMember] + SucceededWithIssues = 1, + + [EnumMember] + Failed = 2, + + [EnumMember] + Canceled = 3, + + [EnumMember] + Skipped = 4, + + [EnumMember] + Abandoned = 5, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskRunsOnConstants.cs b/src/Sdk/DTWebApi/WebApi/TaskRunsOnConstants.cs new file mode 100644 index 00000000000..fdddf9e1019 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskRunsOnConstants.cs @@ -0,0 +1,23 @@ +using System.Collections.Generic; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.WebApi +{ + [GenerateAllConstants] + public class TaskRunsOnConstants + { + public const string RunsOnAgent = "Agent"; + public const string RunsOnMachineGroup = "MachineGroup"; + public const string RunsOnDeploymentGroup = "DeploymentGroup"; + public const string RunsOnServer = "Server"; + + public static readonly List DefaultValue = new List { RunsOnAgent, RunsOnDeploymentGroup }; + + public static readonly List RunsOnAllTypes = 
new List + { + RunsOnAgent, + RunsOnDeploymentGroup, + RunsOnServer, + }; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskSourceDefinition.cs b/src/Sdk/DTWebApi/WebApi/TaskSourceDefinition.cs new file mode 100644 index 00000000000..698c0bb088c --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskSourceDefinition.cs @@ -0,0 +1,35 @@ +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using CommonContracts = GitHub.DistributedTask.Common.Contracts; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskSourceDefinition : CommonContracts.TaskSourceDefinitionBase + { + public TaskSourceDefinition() + : base() + { + } + + private TaskSourceDefinition(TaskSourceDefinition inputDefinitionToClone) + : base(inputDefinitionToClone) + { + } + + private TaskSourceDefinition(TaskSourceDefinition inputDefinitionToClone, ISecuredObject securedObject) + : base(inputDefinitionToClone, securedObject) + { + } + + public TaskSourceDefinition Clone() + { + return new TaskSourceDefinition(this); + } + + public override CommonContracts.TaskSourceDefinitionBase Clone(ISecuredObject securedObject) + { + return new TaskSourceDefinition(this, securedObject); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskVersion.cs b/src/Sdk/DTWebApi/WebApi/TaskVersion.cs new file mode 100644 index 00000000000..b10d0a89a78 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskVersion.cs @@ -0,0 +1,182 @@ +using GitHub.Services.Common; +using System; +using System.Globalization; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TaskVersion : IComparable, IEquatable + { + public TaskVersion() + { + } + + public TaskVersion(String version) + { + Int32 major, minor, patch; + String semanticVersion; + + VersionParser.ParseVersion(version, out major, out minor, out patch, out semanticVersion); + Major = major; + Minor = minor; + Patch = patch; + + if (semanticVersion != null) + { + if (semanticVersion.Equals("test", StringComparison.OrdinalIgnoreCase)) + { + IsTest = true; + } + else + { + throw new ArgumentException("semVer"); + } + } + } + + private TaskVersion(TaskVersion taskVersionToClone) + { + this.IsTest = taskVersionToClone.IsTest; + this.Major = taskVersionToClone.Major; + this.Minor = taskVersionToClone.Minor; + this.Patch = taskVersionToClone.Patch; + } + + [DataMember] + public Int32 Major + { + get; + set; + } + + [DataMember] + public Int32 Minor + { + get; + set; + } + + [DataMember] + public Int32 Patch + { + get; + set; + } + + [DataMember] + public Boolean IsTest + { + get; + set; + } + + public TaskVersion Clone() + { + return new TaskVersion(this); + } + + public static implicit operator String(TaskVersion version) + { + return version.ToString(); + } + + public override String ToString() + { + String suffix = String.Empty; + if (IsTest) + { + suffix = "-test"; + } + + return String.Format(CultureInfo.InvariantCulture, "{0}.{1}.{2}{3}", Major, Minor, Patch, suffix); + } + + public override int GetHashCode() + { + return this.ToString().GetHashCode(); + } + + public Int32 CompareTo(TaskVersion other) + { + Int32 rc = Major.CompareTo(other.Major); + if (rc == 0) + { + rc = Minor.CompareTo(other.Minor); + if (rc == 0) + { + rc = Patch.CompareTo(other.Patch); + if (rc == 0 && this.IsTest != other.IsTest) + { + rc = this.IsTest ? 
-1 : 1; + } + } + } + + return rc; + } + + public Boolean Equals(TaskVersion other) + { + if (other is null) + { + return false; + } + + return this.CompareTo(other) == 0; + } + + public override bool Equals(object obj) + { + return Equals(obj as TaskVersion); + } + + public static Boolean operator ==(TaskVersion v1, TaskVersion v2) + { + if (v1 is null) + { + return v2 is null; + } + + return v1.Equals(v2); + } + + public static Boolean operator !=(TaskVersion v1, TaskVersion v2) + { + if (v1 is null) + { + return !(v2 is null); + } + + return !v1.Equals(v2); + } + + public static Boolean operator <(TaskVersion v1, TaskVersion v2) + { + ArgumentUtility.CheckForNull(v1, nameof(v1)); + ArgumentUtility.CheckForNull(v2, nameof(v2)); + return v1.CompareTo(v2) < 0; + } + + public static Boolean operator >(TaskVersion v1, TaskVersion v2) + { + ArgumentUtility.CheckForNull(v1, nameof(v1)); + ArgumentUtility.CheckForNull(v2, nameof(v2)); + return v1.CompareTo(v2) > 0; + } + + public static Boolean operator <=(TaskVersion v1, TaskVersion v2) + { + ArgumentUtility.CheckForNull(v1, nameof(v1)); + ArgumentUtility.CheckForNull(v2, nameof(v2)); + return v1.CompareTo(v2) <= 0; + } + + public static Boolean operator >=(TaskVersion v1, TaskVersion v2) + { + ArgumentUtility.CheckForNull(v1, nameof(v1)); + ArgumentUtility.CheckForNull(v2, nameof(v2)); + return v1.CompareTo(v2) >= 0; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TaskVersionSpec.cs b/src/Sdk/DTWebApi/WebApi/TaskVersionSpec.cs new file mode 100644 index 00000000000..a8617d2877b --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TaskVersionSpec.cs @@ -0,0 +1,239 @@ +using GitHub.Services.Common; +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Text; + +namespace GitHub.DistributedTask.WebApi +{ + public sealed class TaskVersionSpec + { + /// + /// Gets or sets the major version component. + /// + public Int32? Major + { + get; + set; + } + + /// + /// Gets or sets the minor version component. + /// + public Int32? Minor + { + get; + set; + } + + /// + /// Gets or sets the patch version component. + /// + public Int32? Patch + { + get; + set; + } + + /// + /// Gets or sets a value locking the semantic version to test. + /// + public Boolean IsTest + { + get; + set; + } + + /// + /// Provides a string representation of the version specification. + /// + /// A printable string representation of a version specification + public override String ToString() + { + StringBuilder sb = new StringBuilder(); + if (this.Major == null) + { + sb.Append("*"); + } + else + { + sb.Append(this.Major.Value); + if (this.Minor != null) + { + sb.AppendFormat(CultureInfo.InvariantCulture, ".{0}", this.Minor.Value); + if (this.Patch != null) + { + sb.AppendFormat(CultureInfo.InvariantCulture, ".{0}", this.Patch.Value); + } + else + { + sb.Append(".*"); + } + } + else + { + sb.Append(".*"); + } + } + + if (this.IsTest) + { + sb.Append("-test"); + } + + return sb.ToString(); + } + + /// + /// Provides an explicit conversion constructor for converting from a String. + /// + /// The version specification string + /// A version specification object + /// When the provided version string is not valid + public static explicit operator TaskVersionSpec(String version) + { + return Parse(version); + } + + /// + /// Finds the closest version match for the current specification. If no match can be found then a null + /// value is returned. 
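+        /// Illustrative sketch: a wildcard specification selects the highest matching version, e.g.
+        /// <code>
+        ///   var spec = TaskVersionSpec.Parse("1.*");
+        ///   var best = spec.Match(new[] { new TaskVersion("1.2.0"), new TaskVersion("1.10.3") });
+        ///   // best is 1.10.3: candidates are filtered on the specified components, then ordered descending
+        /// </code>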
+ /// + /// The list of versions available for matching + /// The version which matches the specification if found; otherwise, null + public TaskVersion Match(IEnumerable versions) + { + ArgumentUtility.CheckForNull(versions, nameof(versions)); + + // Do not evaluate until the end so we only actually iterate the list a single time. Since LINQ returns + // lazy evaluators from the Where method, we can avoid multiple iterations by leaving the variable + // as IEnumerable and performing the iteration after all clauses have been concatenated. + var matchedVersions = versions.Where(x => x.IsTest == this.IsTest); + if (this.Major != null) + { + matchedVersions = matchedVersions.Where(x => x.Major == this.Major); + if (this.Minor != null) + { + matchedVersions = matchedVersions.Where(x => x.Minor == this.Minor); + if (this.Patch != null) + { + matchedVersions = matchedVersions.Where(x => x.Patch == this.Patch); + } + } + } + + return matchedVersions.OrderByDescending(x => x).FirstOrDefault(); + } + + public TaskDefinition Match(IEnumerable definitions) + { + ArgumentUtility.CheckForNull(definitions, nameof(definitions)); + + // Do not evaluate until the end so we only actually iterate the list a single time. Since LINQ returns + // lazy evaluators from the Where method, we can avoid multiple iterations by leaving the variable + // as IEnumerable and performing the iteration after all clauses have been concatenated. + var matchedDefinitions = definitions.Where(x => x.Version.IsTest == this.IsTest); + if (this.Major != null) + { + matchedDefinitions = matchedDefinitions.Where(x => x.Version.Major == this.Major); + if (this.Minor != null) + { + matchedDefinitions = matchedDefinitions.Where(x => x.Version.Minor == this.Minor); + if (this.Patch != null) + { + matchedDefinitions = matchedDefinitions.Where(x => x.Version.Patch == this.Patch); + } + } + } + + return matchedDefinitions.OrderByDescending(x => x.Version).FirstOrDefault(); + } + + public static TaskVersionSpec Parse(String version) + { + TaskVersionSpec versionSpec; + if (!TryParse(version, out versionSpec)) + { + throw new ArgumentException(String.Format(CultureInfo.InvariantCulture, "The value {0} is not a valid version specification", version), "version"); + } + return versionSpec; + } + + public static Boolean TryParse( + String version, + out TaskVersionSpec versionSpec) + { + String[] versionComponents = version.Split(new[] { '.' }, StringSplitOptions.RemoveEmptyEntries); + if (versionComponents.Length < 1 || versionComponents.Length > 3) + { + versionSpec = null; + return false; + } + + Int32? major = null; + Int32? minor = null; + Int32? 
patch = null; + Boolean isTest = false; + String lastComponent = versionComponents[versionComponents.Length - 1]; + if (lastComponent.EndsWith("-test", StringComparison.OrdinalIgnoreCase)) + { + isTest = true; + versionComponents[versionComponents.Length - 1] = lastComponent.Remove(lastComponent.Length - "-test".Length); + } + + if (versionComponents.Length == 1) + { + if (!TryParseVersionComponent(version, "major", versionComponents[0], true, out major)) + { + versionSpec = null; + return false; + } + } + else if (versionComponents.Length == 2) + { + if (!TryParseVersionComponent(version, "major", versionComponents[0], false, out major) || + !TryParseVersionComponent(version, "minor", versionComponents[1], true, out minor)) + { + versionSpec = null; + return false; + } + } + else + { + if (!TryParseVersionComponent(version, "major", versionComponents[0], false, out major) || + !TryParseVersionComponent(version, "minor", versionComponents[1], false, out minor) || + !TryParseVersionComponent(version, "patch", versionComponents[2], true, out patch)) + { + versionSpec = null; + return false; + } + } + + versionSpec = new TaskVersionSpec { Major = major, Minor = minor, Patch = patch, IsTest = isTest }; + return true; + } + + private static Boolean TryParseVersionComponent( + String version, + String name, + String value, + Boolean allowStar, + out Int32? versionValue) + { + versionValue = null; + + Int32 parsedVersion; + if (Int32.TryParse(value, out parsedVersion)) + { + versionValue = parsedVersion; + } + else if (!allowStar || value != "*") + { + return false; + } + + return true; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/Timeline.cs b/src/Sdk/DTWebApi/WebApi/Timeline.cs new file mode 100644 index 00000000000..2c97132a9a8 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/Timeline.cs @@ -0,0 +1,77 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public sealed class Timeline : TimelineReference + { + public Timeline() + { + } + + public Timeline(Guid timelineId) + { + this.Id = timelineId; + } + + private Timeline(Timeline timelineToBeCloned) + { + this.ChangeId = timelineToBeCloned.ChangeId; + this.Id = timelineToBeCloned.Id; + this.LastChangedBy = timelineToBeCloned.LastChangedBy; + this.LastChangedOn = timelineToBeCloned.LastChangedOn; + this.Location = timelineToBeCloned.Location; + + if (timelineToBeCloned.m_records != null) + { + m_records = timelineToBeCloned.m_records.Select(x => x.Clone()).ToList(); + } + } + + [DataMember] + public Guid LastChangedBy + { + get; + internal set; + } + + [DataMember] + public DateTime LastChangedOn + { + get; + internal set; + } + + public List Records + { + get + { + if (m_records == null) + { + m_records = new List(); + } + return m_records; + } + } + + public Timeline Clone() + { + return new Timeline(this); + } + + [OnSerializing] + private void OnSerializing(StreamingContext context) + { + if (m_records?.Count == 0) + { + m_records = null; + } + } + + [DataMember(Name = "Records", EmitDefaultValue = false, Order = 4)] + private List m_records; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TimelineAttempt.cs b/src/Sdk/DTWebApi/WebApi/TimelineAttempt.cs new file mode 100644 index 00000000000..81d423f384a --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TimelineAttempt.cs @@ -0,0 +1,49 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TimelineAttempt + 
+    {
+        /// <summary>
+        /// Gets or sets the unique identifier for the record.
+        /// </summary>
+        [DataMember(EmitDefaultValue = false)]
+        public String Identifier { get; set; }
+
+        /// <summary>
+        /// Gets or sets the attempt of the record.
+        /// </summary>
+        [DataMember(EmitDefaultValue = false)]
+        public Int32 Attempt { get; set; }
+
+        /// <summary>
+        /// Gets or sets the timeline identifier which owns the record representing this attempt.
+        /// </summary>
+        [DataMember(EmitDefaultValue = false)]
+        public Guid TimelineId { get; set; }
+
+        /// <summary>
+        /// Gets or sets the record identifier located within the specified timeline.
+        /// </summary>
+        [DataMember(EmitDefaultValue = false)]
+        public Guid RecordId { get; set; }
+    }
+}
diff --git a/src/Sdk/DTWebApi/WebApi/TimelineRecord.cs b/src/Sdk/DTWebApi/WebApi/TimelineRecord.cs
new file mode 100644
index 00000000000..5c5334f2f22
--- /dev/null
+++ b/src/Sdk/DTWebApi/WebApi/TimelineRecord.cs
@@ -0,0 +1,307 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.Serialization;
+using GitHub.Services.Common;
+
+namespace GitHub.DistributedTask.WebApi
+{
+    [DataContract]
+    public sealed class TimelineRecord
+    {
+        public TimelineRecord()
+        {
+            this.Attempt = 1;
+        }
+
+        private TimelineRecord(TimelineRecord recordToBeCloned)
+        {
+            this.Attempt = recordToBeCloned.Attempt;
+            this.ChangeId = recordToBeCloned.ChangeId;
+            this.CurrentOperation = recordToBeCloned.CurrentOperation;
+            this.FinishTime = recordToBeCloned.FinishTime;
+            this.Id = recordToBeCloned.Id;
+            this.Identifier = recordToBeCloned.Identifier;
+            this.LastModified = recordToBeCloned.LastModified;
+            this.Location = recordToBeCloned.Location;
+            this.Name = recordToBeCloned.Name;
+            this.Order = recordToBeCloned.Order;
+            this.ParentId = recordToBeCloned.ParentId;
+            this.PercentComplete = recordToBeCloned.PercentComplete;
+            this.RecordType = recordToBeCloned.RecordType;
+            this.Result = recordToBeCloned.Result;
+            this.ResultCode = recordToBeCloned.ResultCode;
+            this.StartTime = recordToBeCloned.StartTime;
+            this.State = recordToBeCloned.State;
+            this.TimelineId = recordToBeCloned.TimelineId;
+            this.WorkerName = recordToBeCloned.WorkerName;
+            this.RefName = recordToBeCloned.RefName;
+            this.ErrorCount = recordToBeCloned.ErrorCount;
+            this.WarningCount = recordToBeCloned.WarningCount;
+
+            if (recordToBeCloned.Log != null)
+            {
+                this.Log = new TaskLogReference
+                {
+                    Id = recordToBeCloned.Log.Id,
+                    Location = recordToBeCloned.Log.Location,
+                };
+            }
+
+            if (recordToBeCloned.Details != null)
+            {
+                this.Details = new TimelineReference
+                {
+                    ChangeId = recordToBeCloned.Details.ChangeId,
+                    Id = recordToBeCloned.Details.Id,
+                    Location = recordToBeCloned.Details.Location,
+                };
+            }
+
+            if (recordToBeCloned.Task != null)
+            {
+                this.Task = recordToBeCloned.Task.Clone();
+            }
+
+            if (recordToBeCloned.m_issues?.Count > 0)
+            {
+                this.Issues.AddRange(recordToBeCloned.Issues.Select(i => i.Clone()));
+            }
+
+            if (recordToBeCloned.m_previousAttempts?.Count > 0)
+            {
+                this.PreviousAttempts.AddRange(recordToBeCloned.PreviousAttempts);
+            }
+
+            if (recordToBeCloned.m_variables?.Count > 0)
+            {
+                this.m_variables = recordToBeCloned.Variables.ToDictionary(k => k.Key, v => v.Value.Clone());
+            }
+        }
+
+        [DataMember(Order = 1)]
+        public Guid Id { get; set; }
+
+        [IgnoreDataMember]
+        public Guid? TimelineId { get; set; }
+
+        [DataMember(Order = 2)]
+        public Guid? ParentId { get; set; }
+
+        [DataMember(Name = "Type", Order = 3)]
+        public String RecordType { get; set; }
+
+        [DataMember(Order = 4)]
+        public String Name { get; set; }
+
+        [DataMember(Order = 5)]
+        public DateTime? StartTime { get; set; }
+
+        [DataMember(Order = 6)]
+        public DateTime? FinishTime { get; set; }
+
+        [DataMember(Order = 7)]
+        public String CurrentOperation { get; set; }
+
+        [DataMember(Order = 8)]
+        public Int32? PercentComplete { get; set; }
+
+        [DataMember(Order = 9)]
+        public TimelineRecordState? State { get; set; }
+
+        [DataMember(Order = 10)]
+        public TaskResult? Result { get; set; }
+
+        [DataMember(Order = 11)]
+        public String ResultCode { get; set; }
+
+        [DataMember(Order = 12)]
+        public Int32 ChangeId { get; set; }
+
+        [DataMember(Order = 13)]
+        public DateTime LastModified { get; set; }
+
+        [DataMember(Order = 14)]
+        public String WorkerName { get; set; }
+
+        [DataMember(Order = 15, EmitDefaultValue = false)]
+        public Int32? Order { get; set; }
+
+        [DataMember(Order = 16, EmitDefaultValue = false)]
+        public String RefName { get; set; }
+
+        [DataMember(Order = 20)]
+        public TaskLogReference Log { get; set; }
+
+        [DataMember(Order = 30)]
+        public TimelineReference Details { get; set; }
+
+        [DataMember(Order = 40)]
+        public Int32? ErrorCount { get; set; }
+
+        [DataMember(Order = 50)]
+        public Int32? WarningCount { get; set; }
+
+        public List<Issue> Issues
+        {
+            get
+            {
+                if (m_issues == null)
+                {
+                    m_issues = new List<Issue>();
+                }
+                return m_issues;
+            }
+        }
+
+        [DataMember(EmitDefaultValue = false, Order = 70)]
+        public TaskReference Task { get; set; }
+
+        [DataMember(Order = 100)]
+        public Uri Location { get; set; }
+
+        [DataMember(Order = 130)]
+        public Int32 Attempt { get; set; }
+
+        [DataMember(Order = 131)]
+        public String Identifier { get; set; }
+
+        public IList<TimelineAttempt> PreviousAttempts
+        {
+            get
+            {
+                if (m_previousAttempts == null)
+                {
+                    m_previousAttempts = new List<TimelineAttempt>();
+                }
+                return m_previousAttempts;
+            }
+        }
+
+        public IDictionary<String, VariableValue> Variables
+        {
+            get
+            {
+                if (m_variables == null)
+                {
+                    m_variables = new Dictionary<String, VariableValue>(StringComparer.OrdinalIgnoreCase);
+                }
+                return m_variables;
+            }
+        }
+
+        public TimelineRecord Clone()
+        {
+            return new TimelineRecord(this);
+        }
+
+        [DataMember(Name = "Issues", EmitDefaultValue = false, Order = 60)]
+        private List<Issue> m_issues;
+
+        [DataMember(Name = "Variables", EmitDefaultValue = false, Order = 80)]
+        private Dictionary<String, VariableValue> m_variables;
+
+        [DataMember(Name = "PreviousAttempts", EmitDefaultValue = false, Order = 120)]
+        private List<TimelineAttempt> m_previousAttempts;
+    }
+}
diff --git a/src/Sdk/DTWebApi/WebApi/TimelineRecordFeedLinesWrapper.cs b/src/Sdk/DTWebApi/WebApi/TimelineRecordFeedLinesWrapper.cs
new file mode 100644
index 00000000000..c628852bdbc
--- /dev/null
+++ b/src/Sdk/DTWebApi/WebApi/TimelineRecordFeedLinesWrapper.cs
@@ -0,0 +1,35 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.Serialization;
+
+namespace GitHub.DistributedTask.WebApi
+{
+    [DataContract]
+    public sealed class TimelineRecordFeedLinesWrapper
+    {
+        public TimelineRecordFeedLinesWrapper()
+        {
+        }
+
+        public TimelineRecordFeedLinesWrapper(Guid stepId, IList<String> lines)
+        {
+            this.StepId = stepId;
+            this.Value = lines.ToList();
+            this.Count = lines.Count;
+        }
+
+        [DataMember(Order = 0)]
+        public Int32 Count { get; private set; }
+
+        [DataMember]
public List Value + { + get; private set; + } + + [DataMember(EmitDefaultValue = false)] + public Guid StepId { get; set; } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TimelineRecordState.cs b/src/Sdk/DTWebApi/WebApi/TimelineRecordState.cs new file mode 100644 index 00000000000..5b08aff2456 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TimelineRecordState.cs @@ -0,0 +1,17 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public enum TimelineRecordState + { + [EnumMember] + Pending, + + [EnumMember] + InProgress, + + [EnumMember] + Completed, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/TimelineReference.cs b/src/Sdk/DTWebApi/WebApi/TimelineReference.cs new file mode 100644 index 00000000000..7b1d0703816 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/TimelineReference.cs @@ -0,0 +1,34 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class TimelineReference + { + public TimelineReference() + { + } + + [DataMember(Order = 1)] + public Guid Id + { + get; + set; + } + + [DataMember(Order = 2)] + public Int32 ChangeId + { + get; + set; + } + + [DataMember(Order = 3)] + public Uri Location + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ValidationItem.cs b/src/Sdk/DTWebApi/WebApi/ValidationItem.cs new file mode 100644 index 00000000000..c5c45341b39 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ValidationItem.cs @@ -0,0 +1,60 @@ +using System; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + [KnownType(typeof(ExpressionValidationItem))] + [KnownType(typeof(InputValidationItem))] + [JsonConverter(typeof(ValidationItemJsonConverter))] + public class ValidationItem + { + protected ValidationItem(String type) + { + this.Type = type; + } + + /// + /// Type of validation item + /// + [DataMember(EmitDefaultValue = false)] + public String Type + { + get; + private set; + } + + /// + /// Value to validate. + /// The conditional expression to validate for the input for "expression" type + /// Eg:eq(variables['Build.SourceBranch'], 'refs/heads/master');eq(value, 'refs/heads/master') + /// + [DataMember(EmitDefaultValue = false)] + public String Value + { + get; + set; + } + + /// + /// Tells whether the current input is valid or not + /// + [DataMember(EmitDefaultValue = false)] + public Boolean? 
IsValid + { + get; + set; + } + + /// + /// Reason for input validation failure + /// + [DataMember(EmitDefaultValue = false)] + public String Reason + { + get; + set; + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ValidationItemJsonConverter.cs b/src/Sdk/DTWebApi/WebApi/ValidationItemJsonConverter.cs new file mode 100644 index 00000000000..c3ee45c7680 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ValidationItemJsonConverter.cs @@ -0,0 +1,102 @@ +using System; +using System.Reflection; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + internal sealed class ValidationItemJsonConverter : VssSecureJsonConverter + { + public override Boolean CanConvert(Type objectType) + { + return typeof(ValidationItem).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + if (reader.TokenType != JsonToken.StartObject) + { + return null; + } + + Object newValue = null; + + if (objectType == typeof(ExpressionValidationItem)) + { + newValue = new ExpressionValidationItem(); + } + else if (objectType == typeof(InputValidationItem)) + { + newValue = new InputValidationItem(); + } + + JObject value = JObject.Load(reader); + + if (newValue == null) + { + var contract = serializer.ContractResolver.ResolveContract(objectType) as JsonObjectContract; + if (contract == null) + { + return existingValue; + } + + JsonProperty property = contract.Properties.GetClosestMatchProperty("Type"); + if (property == null) + { + return existingValue; + } + + JToken itemTypeValue; + String itemType = InputValidationTypes.Expression; + if (value.TryGetValue(property.PropertyName, out itemTypeValue) + && itemTypeValue.Type == JTokenType.String) + { + itemType = (String)itemTypeValue; + } + + switch (itemType) + { + case InputValidationTypes.Expression: + newValue = new ExpressionValidationItem(); + break; + case InputValidationTypes.Input: + newValue = new InputValidationItem(); + break; + } + } + + if (value != null) + { + using (JsonReader objectReader = value.CreateReader()) + { + serializer.Populate(objectReader, newValue); + } + } + + return newValue; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + // The virtual method returns false for CanWrite so this should never be invoked + throw new NotSupportedException(); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ValidationRequest.cs b/src/Sdk/DTWebApi/WebApi/ValidationRequest.cs new file mode 100644 index 00000000000..d50009aac38 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ValidationRequest.cs @@ -0,0 +1,26 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + public class InputValidationRequest + { + public IDictionary Inputs + { + get + { + if (m_inputs == null) + { + m_inputs = new Dictionary(StringComparer.Ordinal); + } + + return m_inputs; + } + } + + [DataMember(Name = "Inputs")] + private Dictionary m_inputs; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/ValidationTypes.cs b/src/Sdk/DTWebApi/WebApi/ValidationTypes.cs new file mode 100644 index 00000000000..7c6391b185e --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/ValidationTypes.cs @@ -0,0 +1,12 @@ +using System; +using 
GitHub.Services.Common; + +namespace GitHub.DistributedTask.WebApi +{ + [GenerateAllConstants] + public static class InputValidationTypes + { + public const String Expression = "expression"; + public const String Input = "input"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/VariableGroup.cs b/src/Sdk/DTWebApi/WebApi/VariableGroup.cs new file mode 100644 index 00000000000..8f6bb2f4a6c --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/VariableGroup.cs @@ -0,0 +1,244 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// A variable group is a collection of related variables. + /// + [JsonConverter(typeof(VariableGroupJsonConverter))] + [DataContract] + public class VariableGroup + { + public VariableGroup() + { + } + + private VariableGroup(VariableGroup group) + { + this.Id = group.Id; + this.Type = group.Type; + this.Name = group.Name; + this.Description = group.Description; + this.ProviderData = group.ProviderData; + this.CreatedBy = group.CreatedBy; + this.CreatedOn = group.CreatedOn; + this.ModifiedBy = group.ModifiedBy; + this.ModifiedOn = group.ModifiedOn; + this.IsShared = group.IsShared; + this.Variables = group.Variables.ToDictionary(x => x.Key, x => x.Value.Clone()); + } + + /// + /// Gets or sets id of the variable group. + /// + [DataMember(EmitDefaultValue = false)] + public Int32 Id + { + get; + set; + } + + /// + /// Gets or sets type of the variable group. + /// + [DataMember(EmitDefaultValue = false)] + public String Type + { + get; + set; + } + + /// + /// Gets or sets name of the variable group. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Gets or sets description of the variable group. + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + /// + /// Gets or sets provider data. + /// + [DataMember(EmitDefaultValue = false)] + public VariableGroupProviderData ProviderData + { + get; + set; + } + + /// + /// Gets or sets the identity who created the variable group. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef CreatedBy + { + get; + set; + } + + /// + /// Gets or sets the time when variable group was created. + /// + [DataMember(EmitDefaultValue = false)] + public DateTime CreatedOn + { + get; + set; + } + + /// + /// Gets or sets the identity who modified the variable group. + /// + [DataMember(EmitDefaultValue = false)] + public IdentityRef ModifiedBy + { + get; + set; + } + + /// + /// Gets or sets the time when variable group was modified + /// + [DataMember(EmitDefaultValue = false)] + public DateTime ModifiedOn + { + get; + set; + } + + /// + /// Indicates whether variable group is shared with other projects or not. + /// + [DataMember(EmitDefaultValue = true)] + public Boolean IsShared + { + get; + set; + } + + public IDictionary Variables + { + get + { + if (m_variables == null) + { + m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_variables; + } + set + { + if (value == null) + { + m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + else + { + m_variables = new Dictionary(value, StringComparer.OrdinalIgnoreCase); + } + } + } + + public VariableGroup Clone() + { + return new VariableGroup(this); + } + + /// + /// Gets or sets variables contained in the variable group. 
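+        /// Illustrative note (group denotes any VariableGroup instance): lookups are case-insensitive
+        /// and Clone() copies every value, e.g.
+        /// <code>
+        ///   var value = group.Variables["MyVar"];   // same entry as group.Variables["myvar"]
+        ///   var copy = group.Clone();               // copy.Variables holds per-value clones
+        /// </code>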
+ /// + [DataMember(EmitDefaultValue = false, Name = "Variables")] + private Dictionary m_variables; + } + + internal sealed class VariableGroupJsonConverter : VssSecureJsonConverter + { + public override Boolean CanRead + { + get + { + return true; + } + } + + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Boolean CanConvert(Type objectType) + { + return typeof(VariableGroup).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + var variableGroupJsonObject = JObject.Load(reader); + var variablesJsonObject = variableGroupJsonObject.GetValue("Variables", StringComparison.OrdinalIgnoreCase); + var providerDataJsonObject = variableGroupJsonObject.GetValue("ProviderData", StringComparison.OrdinalIgnoreCase); + + String variablesJson = null; + if (variablesJsonObject != null) + { + variablesJson = variablesJsonObject.ToString(); + } + + String providerDataJson = null; + if (providerDataJsonObject != null) + { + providerDataJson = providerDataJsonObject.ToString(); + } + + VariableGroup variableGroup = new VariableGroup(); + using (var objectReader = variableGroupJsonObject.CreateReader()) + { + serializer.Populate(objectReader, variableGroup); + } + + if (String.IsNullOrEmpty(variableGroup.Type)) + { + // To handle backward compat with clients making api calls without type + variableGroup.Type = VariableGroupType.Vsts; + } + + variableGroup.PopulateVariablesAndProviderData(variablesJson, providerDataJson); + + return variableGroup; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/VariableGroupActionFilter.cs b/src/Sdk/DTWebApi/WebApi/VariableGroupActionFilter.cs new file mode 100644 index 00000000000..d5edb907ea0 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/VariableGroupActionFilter.cs @@ -0,0 +1,19 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [Flags] + [DataContract] + public enum VariableGroupActionFilter + { + [EnumMember] + None = 0, + + [EnumMember] + Manage = 2, + + [EnumMember] + Use = 16, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/VariableGroupParameters.cs b/src/Sdk/DTWebApi/WebApi/VariableGroupParameters.cs new file mode 100644 index 00000000000..c455cdfe419 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/VariableGroupParameters.cs @@ -0,0 +1,159 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Runtime.Serialization; +using System.Text; +using System.Threading.Tasks; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.WebApi +{ + [JsonConverter(typeof(VariableGroupParametersJsonConverter))] + [DataContract] + public class VariableGroupParameters + { + /// + /// Sets type of the variable group. + /// + [DataMember(EmitDefaultValue = false)] + public String Type + { + get; + set; + } + + /// + /// Sets name of the variable group. + /// + [DataMember(EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + /// + /// Sets description of the variable group. + /// + [DataMember(EmitDefaultValue = false)] + public String Description + { + get; + set; + } + + /// + /// Sets provider data. 
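+        /// Illustrative sketch (names are hypothetical): parameters are populated like a VariableGroup,
+        /// with variables keyed case-insensitively and Type defaulting to Vsts when omitted on the wire:
+        /// <code>
+        ///   var parameters = new VariableGroupParameters { Type = VariableGroupType.Vsts, Name = "my-group" };
+        ///   parameters.Variables["Endpoint"] = new VariableValue { Value = "https://example.test" };
+        /// </code>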
+ /// + [DataMember(EmitDefaultValue = false)] + public VariableGroupProviderData ProviderData + { + get; + set; + } + + public IDictionary Variables + { + get + { + if (m_variables == null) + { + m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return m_variables; + } + set + { + if (value == null) + { + m_variables = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + else + { + m_variables = new Dictionary(value, StringComparer.OrdinalIgnoreCase); + } + } + } + + /// + /// Sets variables contained in the variable group. + /// + [DataMember(EmitDefaultValue = false, Name = "Variables")] + private Dictionary m_variables; + } + + internal sealed class VariableGroupParametersJsonConverter : VssSecureJsonConverter + { + public override Boolean CanRead + { + get + { + return true; + } + } + + public override Boolean CanWrite + { + get + { + return false; + } + } + + public override Boolean CanConvert(Type objectType) + { + return typeof(VariableGroupParameters).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override Object ReadJson( + JsonReader reader, + Type objectType, + Object existingValue, + JsonSerializer serializer) + { + var variableGroupJsonObject = JObject.Load(reader); + var variablesJsonObject = variableGroupJsonObject.GetValue("Variables", StringComparison.OrdinalIgnoreCase); + var providerDataJsonObject = variableGroupJsonObject.GetValue("ProviderData", StringComparison.OrdinalIgnoreCase); + + String variablesJson = null; + if (variablesJsonObject != null) + { + variablesJson = variablesJsonObject.ToString(); + } + + String providerDataJson = null; + if (providerDataJsonObject != null) + { + providerDataJson = providerDataJsonObject.ToString(); + } + + VariableGroupParameters variableGroupParameters = new VariableGroupParameters(); + using (var objectReader = variableGroupJsonObject.CreateReader()) + { + serializer.Populate(objectReader, variableGroupParameters); + } + + if (String.IsNullOrEmpty(variableGroupParameters.Type)) + { + // To handle backward compat with clients making api calls without type + variableGroupParameters.Type = VariableGroupType.Vsts; + } + + variableGroupParameters.PopulateVariablesAndProviderData(variablesJson, providerDataJson); + + return variableGroupParameters; + } + + public override void WriteJson( + JsonWriter writer, + Object value, + JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/VariableGroupProviderData.cs b/src/Sdk/DTWebApi/WebApi/VariableGroupProviderData.cs new file mode 100644 index 00000000000..1f90d1310c7 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/VariableGroupProviderData.cs @@ -0,0 +1,13 @@ +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Defines provider data of the variable group. + /// + [KnownType(typeof(AzureKeyVaultVariableGroupProviderData))] + [DataContract] + public class VariableGroupProviderData + { + } +} diff --git a/src/Sdk/DTWebApi/WebApi/VariableGroupQueryOrder.cs b/src/Sdk/DTWebApi/WebApi/VariableGroupQueryOrder.cs new file mode 100644 index 00000000000..e8736c18eaa --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/VariableGroupQueryOrder.cs @@ -0,0 +1,29 @@ +// -------------------------------------------------------------------------------------------------------------------- +// +// 2012-2023, All rights reserved. 
+// +// -------------------------------------------------------------------------------------------------------------------- + +namespace GitHub.DistributedTask.WebApi +{ + using System.Runtime.Serialization; + + /// + /// Specifies the desired ordering of variableGroups. + /// + [DataContract] + public enum VariableGroupQueryOrder + { + /// + /// Order by id ascending. + /// + [EnumMember] + IdAscending = 0, + + /// + /// Order by id descending. + /// + [EnumMember] + IdDescending = 1, + } +} diff --git a/src/Sdk/DTWebApi/WebApi/VariableGroupType.cs b/src/Sdk/DTWebApi/WebApi/VariableGroupType.cs new file mode 100644 index 00000000000..8c7e5f40dad --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/VariableGroupType.cs @@ -0,0 +1,12 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.DistributedTask.WebApi +{ + [GenerateAllConstants] + public static class VariableGroupType + { + public const String Vsts = "Vsts"; + public const String AzureKeyVault = "AzureKeyVault"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/VariableGroupUtility.cs b/src/Sdk/DTWebApi/WebApi/VariableGroupUtility.cs new file mode 100644 index 00000000000..1de79ddf89f --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/VariableGroupUtility.cs @@ -0,0 +1,318 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using GitHub.Services.WebApi; + +namespace GitHub.DistributedTask.WebApi +{ + /// + /// Utility class to perform operations on Variable groups. + /// + public static class VariableGroupUtility + { + public static VariableValue Clone(this VariableValue value) + { + if (keyVaultVariableType == value.GetType()) + { + return new AzureKeyVaultVariableValue((AzureKeyVaultVariableValue)value); + } + + return new VariableValue(value); + } + + public static void PopulateVariablesAndProviderData(this VariableGroup group, String variablesJson, String providerDataJson) + { + switch (group.Type) + { + case VariableGroupType.Vsts: + if (variablesJson != null) + { + group.Variables = JsonUtility.FromString>(variablesJson); + } + + if (providerDataJson != null) + { + group.ProviderData = JsonUtility.FromString(providerDataJson); + } + + break; + + case VariableGroupType.AzureKeyVault: + if (variablesJson != null) + { + var azureKeyVaultVariableValues = JsonUtility.FromString>(variablesJson); + if (azureKeyVaultVariableValues != null) + { + foreach (var azureKeyVaultVariableValue in azureKeyVaultVariableValues) + { + group.Variables[azureKeyVaultVariableValue.Key] = azureKeyVaultVariableValue.Value; + } + } + } + + if (providerDataJson != null) + { + group.ProviderData = JsonUtility.FromString(providerDataJson); + } + + break; + } + } + + public static void PopulateVariablesAndProviderData(this VariableGroupParameters variableGroupParameters, String variablesJson, String providerDataJson) + { + switch (variableGroupParameters.Type) + { + case VariableGroupType.Vsts: + if (variablesJson != null) + { + variableGroupParameters.Variables = JsonUtility.FromString>(variablesJson); + } + + if (providerDataJson != null) + { + variableGroupParameters.ProviderData = JsonUtility.FromString(providerDataJson); + } + + break; + + case VariableGroupType.AzureKeyVault: + if (variablesJson != null) + { + var azureKeyVaultVariableValues = JsonUtility.FromString>(variablesJson); + if (azureKeyVaultVariableValues != null) + { + foreach (var azureKeyVaultVariableValue in azureKeyVaultVariableValues) + { + variableGroupParameters.Variables[azureKeyVaultVariableValue.Key] = azureKeyVaultVariableValue.Value; + } + } + } + + if 
(providerDataJson != null) + { + variableGroupParameters.ProviderData = JsonUtility.FromString(providerDataJson); + } + + break; + } + } + + /// + /// Get list of cloned variable groups + /// + /// + /// + public static IList CloneVariableGroups(IList source) + { + var clonedVariableGroups = new List(); + if (source == null) + { + return clonedVariableGroups; + } + + foreach (var group in source) + { + if (group != null) + { + clonedVariableGroups.Add(group.Clone()); + } + } + + return clonedVariableGroups; + } + + /// + /// Replace secret values in group variables with null + /// + /// Variable groups to be cleared for secret variables + /// List of cleared variable groups + public static IList ClearSecrets(IList variableGroups) + { + var groups = new List(); + + if (variableGroups == null) + { + return groups; + } + + foreach (var group in variableGroups) + { + if (group != null) + { + var clearedGroup = group.Clone(); + + // Replacing secret variable's value with null + foreach (var variable in clearedGroup.Variables) + { + if (variable.Value != null && variable.Value.IsSecret) + { + variable.Value.Value = null; + } + } + + groups.Add(clearedGroup); + } + } + + return groups; + } + + /// + /// Replace all secrets in variables with null + /// + /// Variable set + /// Dictionary of variables + public static IDictionary ClearSecrets(IDictionary variables) + { + var dictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); + + if (variables == null) + { + return dictionary; + } + + foreach (var kvp in variables) + { + if (kvp.Value != null) + { + var clonedValue = kvp.Value.Clone(); + + if (kvp.Value.IsSecret) + { + clonedValue.Value = null; + } + + dictionary[kvp.Key] = clonedValue; + } + } + + return dictionary; + } + + /// + /// Check if any variable group has variable with secret value + /// + /// Variable groups to check if contains any secret variable with value. 
+ /// Result + public static bool HasSecretWithValue(IList variableGroups) + { + if (variableGroups == null || variableGroups.Count == 0) + { + return false; + } + + foreach (var group in variableGroups) + { + if (group != null && HasSecretWithValue(group.Variables)) + { + return true; + } + } + + return false; + } + + /// + /// Check if Variables has any secret value + /// + /// Variable set to check for any secret value + /// + public static bool HasSecretWithValue(IDictionary variables) + { + if (variables == null || variables.Count == 0) + { + return false; + } + + return variables.Any(s => s.Value != null && + s.Value.IsSecret && + !String.IsNullOrEmpty(s.Value.Value)); + } + + /// + /// Check if any secret variable exists in variable group + /// + /// Variable groups to check if contains any secret variable + /// Result + public static bool HasSecret(IList variableGroups) + { + if (variableGroups == null || variableGroups.Count == 0) + { + return false; + } + + foreach (var group in variableGroups) + { + if (group != null && HasSecret(group.Variables)) + { + return true; + } + } + + return false; + } + + /// + /// Check if variable set contains any secret variable + /// + /// Variable set to be checked for secret variable + /// + public static bool HasSecret(IDictionary variables) + { + if (variables != null) + { + return variables.Any(v => v.Value != null && v.Value.IsSecret); + } + + return false; + } + + /// + /// Copies secrets from source variable groups to target variable groups + /// + /// Source variable groups + /// Target variable groups + /// + public static void FillSecrets( + IList sourceGroups, + IList targetGroups) + { + if (sourceGroups == null || sourceGroups.Count == 0) + { + return; + } + + if (targetGroups == null) + { + throw new ArgumentNullException("targetGroups"); + } + + foreach (var sourceGroup in sourceGroups) + { + var targetGroup = targetGroups.FirstOrDefault(group => group.Id == sourceGroup.Id); + + if (targetGroup != null) + { + if (sourceGroup.Variables == null || sourceGroup.Variables.Count == 0) + { + // nothing to fill + continue; + } + + if (targetGroup.Variables == null) + { + throw new ArgumentNullException(nameof(targetGroup.Variables)); + } + + foreach (var variable in sourceGroup.Variables.Where(x => x.Value.IsSecret)) + { + targetGroup.Variables[variable.Key] = variable.Value.Clone(); + } + } + } + } + + private static Type keyVaultVariableType = typeof(AzureKeyVaultVariableValue); + } +} diff --git a/src/Sdk/DTWebApi/WebApi/VariableUtility.cs b/src/Sdk/DTWebApi/WebApi/VariableUtility.cs new file mode 100644 index 00000000000..ae537b47ce4 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/VariableUtility.cs @@ -0,0 +1,321 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Text; +using System.Text.RegularExpressions; +using GitHub.DistributedTask.Pipelines; +using GitHub.Services.WebApi; +using Newtonsoft.Json.Linq; + +namespace GitHub.DistributedTask.WebApi +{ + public static class VariableUtility + { + public static EnableAccessTokenType GetEnableAccessTokenType(IDictionary variables) + { + EnableAccessTokenType type; + if (variables != null && + variables.TryGetValue(WellKnownDistributedTaskVariables.EnableAccessToken, out VariableValue enableVariable) && + enableVariable != null) + { + Enum.TryParse(enableVariable.Value, true, out type); + } + else + { + type = EnableAccessTokenType.None; + } + + return type; + } + + public static Boolean 
IsVariable(String value) + { + return s_variableReferenceRegex.Value.IsMatch(value); + } + + /// + /// Replaces variables by recursively cloning tokens in a JObject or JArray by + /// Walks tokens and uses ExpandVariables(string, vars) to resolve all string tokens + /// + /// root token must be a JObject or JArray + /// key value variables to replace in the $(xxx) format + /// root token of cloned tree + public static JToken ExpandVariables(JToken token, IDictionary replacementDictionary, bool useMachineVariables = true) + { + var mapFuncs = new Dictionary> + { + { + JTokenType.String, + (t) => VariableUtility.ExpandVariables(t.ToString(), replacementDictionary, useMachineVariables) + } + }; + + return token.Map(mapFuncs); + } + + public static JToken ExpandVariables( + JToken token, + VariablesDictionary additionalVariableReplacements, + Boolean useMachineVariables) + { + return ExpandVariables(token, (IDictionary)additionalVariableReplacements, useMachineVariables); + } + + /// + /// Replaces multiple variable sets by recursively cloning tokens in a JObject or JArray. + /// Walks tokens and uses ExpandVariables(string, vars) for each set of variables on all string tokens + /// + /// root token must be a JObject or JArray + /// list of replacement key value pairs in the $(xxx) format + /// root token of cloned tree + public static JToken ExpandVariables(JToken token, IList> replacementsList) + { + var mapFuncs = new Dictionary> + { + { + JTokenType.String, + (t) => replacementsList.Aggregate(t, (current, replacementVariables) => ExpandVariables(current.ToString(), replacementVariables)) + } + }; + + return token.Map(mapFuncs); + } + + /// + /// An overload method for ExpandVariables + /// Expand variables in the input provided using the dictionary and the machine's environment variables + /// + public static String ExpandVariables(String input, IDictionary additionalVariableReplacements) + { + return ExpandVariables(input, additionalVariableReplacements, true); + } + + /// + /// Replaces variable references of the form $(variable) with the corresponding replacement value. Values + /// populated into the environment directly are used first. If no value is found in the automation environment + /// then the machine environment variables will be used as a fall back. + /// + /// The value which should be analyzed for environment variables and updated accordingly + /// Use the machine's environment variables when it is true + /// A new value with all variables expanded to their current value based on the environment + public static String ExpandVariables(String input, IDictionary additionalVariableReplacements, bool useMachineVariables) + { + // Do a quick up-front check against a regular expression to determine whether or not there is any + // reason to allocate memory to replace values in the input + if (!s_variableReferenceRegex.Value.IsMatch(input)) + { + return input; + } + + StringBuilder sb = new StringBuilder(input); + List referencedVariables = GetReferencedVariables(input); + for (Int32 i = 0; i < referencedVariables.Count; i++) + { + // The variable reference is of the format $(variable), so we start at index 2 and cut off the last ')' + // character by extracting a length of 3 less than the original length. 
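+                // e.g. for the reference "$(Build.BuildId)" (Length 16), Substring(2, 13) yields "Build.BuildId".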
+ String variableName = referencedVariables[i].Substring(2, referencedVariables[i].Length - 3); + + String replacementValue; + if (!additionalVariableReplacements.TryGetValue(variableName, out replacementValue) && useMachineVariables) + { + replacementValue = System.Environment.GetEnvironmentVariable(variableName); + } + + if (replacementValue != null) + { + sb.Replace(referencedVariables[i], replacementValue); + } + } + + return sb.ToString(); + } + + /// + /// Replaces variable references of the form $(variable) with the corresponding replacement value. Values + /// populated into the environment directly are used first. If no value is found in the automation environment + /// then the machine environment variables will be used as a fall back. + /// + /// The value which should be analyzed for environment variables and updated accordingly + /// Use the machine's environment variables when it is true + /// A new value with all variables expanded to their current value based on the environment + public static String ExpandVariables( + String input, + VariablesDictionary additionalVariableReplacements, + Boolean useMachineVariables, + Boolean maskSecrets = false) + { + return ExpandVariables(input, (IDictionary)additionalVariableReplacements, useMachineVariables, maskSecrets); + } + + /// + /// Replaces variable references of the form $(variable) with the corresponding replacement value. Values + /// populated into the environment directly are used first. If no value is found in the automation environment + /// then the machine environment variables will be used as a fall back. + /// + /// The value which should be analyzed for environment variables and updated accordingly + /// Use the machine's environment variables when it is true + /// A new value with all variables expanded to their current value based on the environment + public static String ExpandVariables( + String input, + IDictionary additionalVariableReplacements, + Boolean useMachineVariables, + Boolean maskSecrets = false) + { + if (String.IsNullOrEmpty(input)) + { + return input; + } + + StringBuilder sb = new StringBuilder(input); + List referencedVariables = GetReferencedVariables(input); + for (Int32 i = 0; i < referencedVariables.Count; i++) + { + // The variable reference is of the format $(variable), so we start at index 2 and cut off the last ')' + // character by extracting a length of 3 less than the original length. 
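+                // Same extraction as above; when the resolved VariableValue is marked IsSecret and maskSecrets is set,
+                // the substituted text below becomes "***" instead of the secret value.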
+ String variableName = referencedVariables[i].Substring(2, referencedVariables[i].Length - 3); + + VariableValue replacementValue; + if (!additionalVariableReplacements.TryGetValue(variableName, out replacementValue) && useMachineVariables) + { + replacementValue = new VariableValue { Value = System.Environment.GetEnvironmentVariable(variableName) }; + } + + if (replacementValue != null) + { + var value = replacementValue.Value; + if (replacementValue.IsSecret && maskSecrets) + { + value = "***"; + } + + sb.Replace(referencedVariables[i], value); + } + } + + return sb.ToString(); + } + + /// + /// Replaces variable references of the form variables['variable_name'] with corresponding replacement values + /// + /// Task condition + /// List of variables and their replacement values + /// + public static String ExpandConditionVariables(String condition, IDictionary additionalVariableReplacements, bool useMachineVariables) + { + // Do a quick up-front check against a regular expression to determine whether or not there is any + // reason to allocate memory to replace values in the input + if (!s_conditionVariableReferenceRegex.Value.IsMatch(condition)) + { + return condition; + } + + StringBuilder sb = new StringBuilder(condition); + MatchCollection matches = s_conditionVariableReferenceRegex.Value.Matches(condition); + + for (Int32 i = 0; i < matches.Count; i++) + { + if (matches[i].Length != 0 && matches[i].Groups.Count >= 2) + { + String referencedVariable = matches[i].Groups[0].Value; + String variableName = matches[i].Groups[1].Value; + + String replacementValue; + if (!additionalVariableReplacements.TryGetValue(variableName, out replacementValue) && useMachineVariables) + { + replacementValue = System.Environment.GetEnvironmentVariable(variableName); + } + + if (replacementValue != null) + { + string convertedValue = PrepareReplacementStringForConditions(replacementValue); + sb.Replace(referencedVariable, convertedValue); + } + } + } + + return sb.ToString(); + } + + /// + /// Prepare replacement string from the given input. For a normal input, add ' around it. 
+ /// Convert a variable of format ${var} to variables['var'] to suit custom conditions + /// + /// input replacement value + /// + public static String PrepareReplacementStringForConditions(String replacementValue) + { + if (replacementValue == null || !IsVariable(replacementValue)) + { + return String.Format(CultureInfo.InvariantCulture, c_conditionReplacementFormat, replacementValue); + } + + List variables = GetReferencedVariables(replacementValue); + + if (variables.Count != 1 || replacementValue.Trim() != variables[0]) + { + return String.Format(CultureInfo.InvariantCulture, c_conditionReplacementFormat, replacementValue); + } + + // Start from index 2 [after $( ] and continue till last but one + string variableName = variables[0].Substring(2, variables[0].Length - 3); + return string.Format(CultureInfo.InvariantCulture, c_conditionVariableFormat, variableName); + } + + private static List GetReferencedVariables(String input) + { + Int32 nestedCount = -1; + Boolean insideMatch = false; + StringBuilder currentMatch = new StringBuilder(); + HashSet result = new HashSet(); + for (int i = 0; i < input.Length; i++) + { + if (!insideMatch && input[i] == '$' && i + 1 < input.Length && input[i + 1] == '(') + { + insideMatch = true; + } + + if (insideMatch) + { + currentMatch.Append(input[i]); + } + + if (insideMatch && input[i] == '(') + { + nestedCount++; + } + + if (insideMatch && input[i] == ')') + { + if (nestedCount == 0) + { + result.Add(currentMatch.ToString()); + currentMatch.Clear(); + insideMatch = false; + nestedCount = -1; + } + else + { + nestedCount--; + } + } + } + + if (insideMatch || nestedCount >= 0) + { + // We didn't finish the last match, that means it isn't correct so we will ignore it + Debug.Fail("We didn't finish the last match!!!!!"); + } + + return result.ToList(); + } + + private static readonly Lazy s_variableReferenceRegex = new Lazy(() => new Regex(@"\$\(([^)]+)\)", RegexOptions.Singleline | RegexOptions.Compiled), true); + private static readonly Lazy s_conditionVariableReferenceRegex = new Lazy(() => new Regex(@"variables\['([^']+)\']", RegexOptions.Singleline | RegexOptions.Compiled), true); + private const String c_conditionReplacementFormat = "'{0}'"; + private const String c_variableFormat = "$({0})"; + private const String c_conditionVariableFormat = "variables['{0}']"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/VariableValue.cs b/src/Sdk/DTWebApi/WebApi/VariableValue.cs new file mode 100644 index 00000000000..99e1d8597e4 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/VariableValue.cs @@ -0,0 +1,44 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.DistributedTask.WebApi +{ + [DataContract] + [KnownType(typeof(AzureKeyVaultVariableValue))] + public class VariableValue + { + public VariableValue() + { + } + + public VariableValue(VariableValue value) + : this(value.Value, value.IsSecret) + { + } + + public VariableValue(String value, Boolean isSecret) + { + Value = value; + IsSecret = isSecret; + } + + [DataMember(EmitDefaultValue = true)] + public String Value + { + get; + set; + } + + [DataMember(EmitDefaultValue = false)] + public Boolean IsSecret + { + get; + set; + } + + public static implicit operator VariableValue(String value) + { + return new VariableValue(value, false); + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/VersionParser.cs b/src/Sdk/DTWebApi/WebApi/VersionParser.cs new file mode 100644 index 00000000000..3e0ffd9510e --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/VersionParser.cs @@ -0,0 +1,45 @@ +using 
GitHub.Services.Common; +using System; + +namespace GitHub.DistributedTask.WebApi +{ + public static class VersionParser + { + public static void ParseVersion( + String version, + out Int32 major, + out Int32 minor, + out Int32 patch, + out String semanticVersion) + { + ArgumentUtility.CheckStringForNullOrEmpty(version, "version"); + + String[] segments = version.Split(new char[] { '.', '-' }, StringSplitOptions.None); + if (segments.Length < 3 || segments.Length > 4) + { + throw new ArgumentException("wrong number of segments"); + } + + if (!Int32.TryParse(segments[0], out major)) + { + throw new ArgumentException("major"); + } + + if (!Int32.TryParse(segments[1], out minor)) + { + throw new ArgumentException("minor"); + } + + if (!Int32.TryParse(segments[2], out patch)) + { + throw new ArgumentException("patch"); + } + + semanticVersion = null; + if (segments.Length == 4) + { + semanticVersion = segments[3]; + } + } + } +} diff --git a/src/Sdk/DTWebApi/WebApi/WellKnownDistributedTaskVariables.cs b/src/Sdk/DTWebApi/WebApi/WellKnownDistributedTaskVariables.cs new file mode 100644 index 00000000000..d0c80f868d6 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/WellKnownDistributedTaskVariables.cs @@ -0,0 +1,53 @@ +using System; + +namespace GitHub.DistributedTask.WebApi +{ + public static class WellKnownDistributedTaskVariables + { + public static readonly String AccessToken = "system.accessToken"; + public static readonly String AccessTokenScope = "system.connection.accessTokenScope"; + public static readonly String AzureUserAgent = "AZURE_HTTP_USER_AGENT"; + public static readonly String CollectionId = "system.collectionId"; + public static readonly String CollectionUrl = "system.collectionUri"; + public static readonly String Culture = "system.culture"; + public static readonly String DefinitionId = "system.definitionId"; + public static readonly String DefinitionName = "system.definitionName"; + public static readonly String EnableAccessToken = "system.enableAccessToken"; + public static readonly String HostType = "system.hosttype"; + public static readonly String HubVersion = "system.hubversion"; + public static readonly String IsScheduled = "system.isScheduled"; + public static readonly String JobAttempt = "system.jobAttempt"; + public static readonly String JobDisplayName = "system.jobDisplayName"; + public static readonly String JobId = "system.jobId"; + public static readonly String JobIdentifier = "system.jobIdentifier"; + public static readonly String JobName = "system.jobName"; + public static readonly String JobParallelismTag = "system.jobParallelismTag"; + public static readonly String JobPositionInPhase = "System.JobPositionInPhase"; + public static readonly String JobStatus = "system.jobStatus"; + public static readonly String MsDeployUserAgent = "MSDEPLOY_HTTP_USER_AGENT"; + public static readonly String ParallelExecutionType = "System.ParallelExecutionType"; + public static readonly String PhaseAttempt = "system.phaseAttempt"; + public static readonly String PhaseDisplayName = "system.phaseDisplayName"; + public static readonly String PhaseId = "system.phaseId"; + public static readonly String PhaseName = "system.phaseName"; + public static readonly String PipelineStartTime = "system.pipelineStartTime"; + public static readonly String PlanId = "system.planId"; + public static readonly String RestrictSecrets = "system.restrictSecrets"; + public static readonly String RetainDefaultEncoding = "agent.retainDefaultEncoding"; + public static readonly String ServerType = 
"system.servertype"; + public static readonly String StageAttempt = "system.stageAttempt"; + public static readonly String StageDisplayName = "system.stageDisplayName"; + public static readonly String StageId = "system.stageId"; + public static readonly String StageName = "system.stageName"; + public static readonly String System = "system"; + public static readonly String TFCollectionUrl = "system.teamFoundationCollectionUri"; + public static readonly String TaskDefinitionsUrl = "system.taskDefinitionsUri"; + public static readonly String TaskDisplayName = "system.taskDisplayName"; + public static readonly String TaskInstanceId = "system.taskInstanceId"; + public static readonly String TaskInstanceName = "system.taskInstanceName"; + public static readonly String TeamProject = "system.teamProject"; + public static readonly String TeamProjectId = "system.teamProjectId"; + public static readonly String TimelineId = "system.timelineId"; + public static readonly String TotalJobsInPhase = "System.TotalJobsInPhase"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/WellKnownPackageTypes.cs b/src/Sdk/DTWebApi/WebApi/WellKnownPackageTypes.cs new file mode 100644 index 00000000000..3b088c7b07e --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/WellKnownPackageTypes.cs @@ -0,0 +1,9 @@ +using System; + +namespace GitHub.DistributedTask.WebApi +{ + public static class WellKnownPackageTypes + { + public static readonly String Agent = "agent"; + } +} diff --git a/src/Sdk/DTWebApi/WebApi/WellKnownServiceEndpointNames.cs b/src/Sdk/DTWebApi/WebApi/WellKnownServiceEndpointNames.cs new file mode 100644 index 00000000000..d7a6853dbd9 --- /dev/null +++ b/src/Sdk/DTWebApi/WebApi/WellKnownServiceEndpointNames.cs @@ -0,0 +1,9 @@ +using System; + +namespace GitHub.DistributedTask.WebApi +{ + public static class WellKnownServiceEndpointNames + { + public const String SystemVssConnection = "SystemVssConnection"; + } +} diff --git a/src/Sdk/InternalsVisibleTo.cs b/src/Sdk/InternalsVisibleTo.cs new file mode 100644 index 00000000000..42a5445ae53 --- /dev/null +++ b/src/Sdk/InternalsVisibleTo.cs @@ -0,0 +1,9 @@ +using System.Runtime.CompilerServices; + +// [assembly: InternalsVisibleTo("Runner.Common")] +// [assembly: InternalsVisibleTo("Runner.PluginHost")] +//[assembly: InternalsVisibleTo("Runner.Plugins")] +// [assembly: InternalsVisibleTo("Runner.Listener")] +// [assembly: InternalsVisibleTo("Runner.Sdk")] +[assembly: InternalsVisibleTo("Runner.Worker")] +[assembly: InternalsVisibleTo("Test")] \ No newline at end of file diff --git a/src/Sdk/Namespaces.cs b/src/Sdk/Namespaces.cs new file mode 100644 index 00000000000..d2b8143cdee --- /dev/null +++ b/src/Sdk/Namespaces.cs @@ -0,0 +1,324 @@ +namespace AsyncFixer +{ +} + +namespace GitHub.DistributedTask.Common.Contracts +{ +} + +namespace GitHub.DistributedTask.Expressions +{ +} + +namespace GitHub.DistributedTask.Expressions.CollectionAccessors +{ +} + +namespace GitHub.DistributedTask.Logging +{ +} + +namespace GitHub.DistributedTask.ObjectTemplating +{ +} + +namespace GitHub.DistributedTask.ObjectTemplating.Schema +{ +} + +namespace GitHub.DistributedTask.ObjectTemplating.Tokens +{ +} + +namespace GitHub.DistributedTask.Orchestration.Server.Artifacts +{ +} + +namespace GitHub.DistributedTask.Pipelines +{ +} + +namespace GitHub.DistributedTask.Pipelines.Artifacts +{ +} + +namespace GitHub.DistributedTask.Pipelines.ContextData +{ +} + +namespace GitHub.DistributedTask.Pipelines.Expressions +{ +} + +namespace GitHub.DistributedTask.Pipelines.ObjectTemplating +{ +} + +namespace 
GitHub.DistributedTask.Pipelines.Runtime +{ +} + +namespace GitHub.DistributedTask.Pipelines.Validation +{ +} + +namespace GitHub.DistributedTask.WebApi +{ +} + +namespace GitHub.GraphProfile.WebApi +{ +} + +namespace GitHub.Services.Account +{ +} + +namespace GitHub.Services.ActivityStatistic +{ +} + +namespace GitHub.Services.Auditing +{ +} + +namespace GitHub.Services.AzureFrontDoor +{ +} + +namespace GitHub.Services.CentralizedFeature +{ +} + +namespace GitHub.Services.Client +{ +} + +namespace GitHub.Services.ClientNotification +{ +} + +namespace GitHub.Services.Commerce +{ +} + +namespace GitHub.Services.Common +{ +} + +namespace GitHub.Services.Common.ClientStorage +{ +} + +namespace GitHub.Services.Common.Diagnostics +{ +} + +namespace GitHub.Services.Common.Internal +{ +} + +namespace GitHub.Services.Compliance +{ +} + +namespace GitHub.Services.ContentSecurityPolicy +{ +} + +namespace GitHub.Services.DelegatedAuthorization +{ +} + +namespace GitHub.Services.Directories.DirectoryService +{ +} + +namespace GitHub.Services.FeatureAvailability +{ +} + +namespace GitHub.Services.FileContainer +{ +} + +namespace GitHub.Services.FormInput +{ +} + +namespace GitHub.Services.GitHubConnector +{ +} + +namespace GitHub.Services.Graph +{ +} + +namespace GitHub.Services.Graph.Client +{ +} + +namespace GitHub.Services.GroupLicensingRule +{ +} + +namespace GitHub.Services.Health +{ +} + +namespace GitHub.Services.HostAcquisition +{ +} + +namespace GitHub.Services.Identity +{ +} + +namespace GitHub.Services.Identity.Client +{ +} + +namespace GitHub.Services.Identity.Mru +{ +} + +namespace GitHub.Services.IdentityPicker +{ +} + +namespace GitHub.Services.Invitation +{ +} + +namespace GitHub.Services.Licensing +{ +} + +namespace GitHub.Services.Location +{ +} + +namespace GitHub.Services.Location.Client +{ +} + +namespace GitHub.Services.MarketingPreferences +{ +} + +namespace GitHub.Services.Notification +{ +} + +namespace GitHub.Services.OAuth +{ +} + +namespace GitHub.Services.OAuthWhitelist +{ +} + +namespace GitHub.Services.Operations +{ +} + +namespace GitHub.Services.Organization +{ +} + +namespace GitHub.Services.PermissionLevel +{ +} + +namespace GitHub.Services.Profile +{ +} + +namespace GitHub.Services.Security +{ +} + +namespace GitHub.Services.ServicePrincipal +{ +} + +namespace GitHub.Services.Servicing +{ +} + +namespace GitHub.Services.Settings +{ +} + +namespace GitHub.Services.TokenAdmin.Client +{ +} + +namespace GitHub.Services.TokenRevocation +{ +} + +namespace GitHub.Services.Tokens +{ +} + +namespace GitHub.Services.Tokens.TokenAdmin.Client +{ +} + +namespace GitHub.Services.TokenSigningKeyLifecycle +{ +} + +namespace GitHub.Services.UserMapping +{ +} + +namespace GitHub.Services.Users +{ +} + +namespace GitHub.Services.WebApi +{ +} + +namespace GitHub.Services.WebApi.Internal +{ +} + +namespace GitHub.Services.WebApi.Jwt +{ +} + +namespace GitHub.Services.WebApi.Location +{ +} + +namespace GitHub.Services.WebApi.Patch +{ +} + +namespace GitHub.Services.WebApi.Patch.Json +{ +} + +namespace GitHub.Services.WebApi.Utilities +{ +} + +namespace GitHub.Services.WebApi.Utilities.Internal +{ +} + +namespace GitHub.Services.WebApi.Xml +{ +} + +namespace GitHub.Services.WebPlatform +{ +} + +namespace GitHub.Services.Zeus +{ +} + diff --git a/src/Sdk/Resources/CommonResources.g.cs b/src/Sdk/Resources/CommonResources.g.cs new file mode 100644 index 00000000000..7f927278df2 --- /dev/null +++ b/src/Sdk/Resources/CommonResources.g.cs @@ -0,0 +1,608 @@ +using System.Globalization; + 
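+// Generated resource accessors: each method below returns a fixed format string,
+// formatted with CultureInfo.CurrentCulture when arguments are supplied.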
+namespace GitHub.Services.Common.Internal +{ + public static class CommonResources + { + + public static string EmptyCollectionNotAllowed() + { + const string Format = @"The collection must contain at least one element."; + return Format; + } + + public static string EmptyStringNotAllowed() + { + const string Format = @"The string must have at least one character."; + return Format; + } + + public static string StringLengthNotAllowed(object arg0, object arg1, object arg2) + { + const string Format = @"Length of '{0}' is invalid. It must be between {1} and {2} characters."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string EmptyGuidNotAllowed(object arg0) + { + const string Format = @"The guid specified for parameter {0} must not be Guid.Empty."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidPropertyName(object arg0) + { + const string Format = @"Invalid property name: '{0}'. Property names cannot contain leading or trailing whitespace."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidPropertyValueSize(object arg0, object arg1, object arg2) + { + const string Format = @"The value of property '{0}' exceeds the maximum size allowed. '{1}' values must not exceed '{2}' bytes."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string DateTimeKindMustBeSpecified() + { + const string Format = @"The DateTimeKind (Local, UTC) must be specified for DateTime arguments."; + return Format; + } + + public static string PropertyArgumentExceededMaximumSizeAllowed(object arg0, object arg1) + { + const string Format = @"The argument '{0}' is too long. It must not contain more than '{1}' characters."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string InvalidStringPropertyValueNullAllowed(object arg0, object arg1, object arg2, object arg3, object arg4) + { + const string Format = @"""{0}"" is an invalid value for the {1} of a {2}. The text must be null or between {3} and {4} characters long."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3, arg4); + } + + public static string InvalidStringPropertyValueNullForbidden(object arg0, object arg1, object arg2, object arg3, object arg4) + { + const string Format = @"""{0}"" is an invalid value for the {1} of a {2}. The text must be between {3} and {4} characters long and cannot be null."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3, arg4); + } + + public static string ValueTypeOutOfRange(object arg0, object arg1, object arg2, object arg3, object arg4) + { + const string Format = @"{0} is out of range for the {1} of a {2}. The value must be between {3} and {4}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3, arg4); + } + + public static string VssPropertyValueOutOfRange(object arg0, object arg1, object arg2, object arg3) + { + const string Format = @"Property '{0}' with value '{1}' is out of range for the Properties service. The value must be between {2} and {3}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + + public static string VssInvalidUnicodeCharacter(object arg0) + { + const string Format = @"The string argument contains a character that is not valid:'u{0:X4}'. 
Correct the argument, and then try the operation again."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorReadingFile(object arg0, object arg1) + { + const string Format = @"Error reading file: {0} ({1})."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string IllegalBase64String() + { + const string Format = @"Illegal attempt to decode a malformed Base64-encoded string."; + return Format; + } + + public static string CannotPromptIfNonInteractive() + { + const string Format = @"The prompt option is invalid because the process is not interactive."; + return Format; + } + + public static string StringContainsInvalidCharacters(object arg0) + { + const string Format = @"The string argument contains a character that is not valid:'{0}'. Correct the argument, and then try the operation again."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string DoubleValueOutOfRange(object arg0, object arg1) + { + const string Format = @"Property '{0}' with value '{1}' is out of range for the Team Foundation Properties service. Double values must be 0, within -1.79E+308 to -2.23E-308, or within 2.23E-308 to 1.79E+308."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string HttpRequestTimeout(object arg0) + { + const string Format = @"The HTTP request timed out after {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string VssUnauthorized(object arg0) + { + const string Format = @"You are not authorized to access {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string VssUnauthorizedUnknownServer() + { + const string Format = @"You are not authorized to access the server."; + return Format; + } + + public static string XmlAttributeEmpty(object arg0, object arg1) + { + const string Format = @"The attribute '{0}' on node '{1}' cannot be empty"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string XmlAttributeNull(object arg0, object arg1) + { + const string Format = @"The node '{0}' must only have the attribute '{1}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string XmlNodeEmpty(object arg0, object arg1) + { + const string Format = @"The xml node '{0}' under node '{1}' cannot be empty"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string XmlNodeMissing(object arg0, object arg1) + { + const string Format = @"The mandatory xml node '{0}' is missing under '{1}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string VssUnsupportedPropertyValueType(object arg0, object arg1) + { + const string Format = @"Property '{0}' of type '{1}' is not supported by the Properties service. Convert the value to an Int32, DateTime, Double, String or Byte array for storage."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ErrorDependencyOptionNotProvided(object arg0, object arg1) + { + const string Format = @"Option '{0}' requires that option '{1}' be provided as well"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ErrorInvalidEnumValueTypeConversion(object arg0) + { + const string Format = @"Invalid enumeration data type '{0}'. 
The type must be a valid enumeration."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorInvalidResponseFileOption(object arg0) + { + const string Format = @"The value provided {0} does not represent a valid response file option. A response file option must be a valid path that begins with the '@' sign (ex: @C:\Folder\ResponseFile.txt)"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorInvalidValueTypeConversion(object arg0, object arg1) + { + const string Format = @"The value '{0}' is not a valid value for argument of type '{1}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ErrorOptionArgumentsNotDefined() + { + const string Format = @"Option arguments are not defined"; + return Format; + } + + public static string ErrorOptionMultiplesNotAllowed(object arg0) + { + const string Format = @"Option '{0}' does not allow multiples/duplicates"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOptionMustExist(object arg0) + { + const string Format = @"Option '{0}' is required"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOptionNotRecognized(object arg0) + { + const string Format = @"Invalid option usage. Option '{0}' is not a recognized argument."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOptionRequired(object arg0) + { + const string Format = @"Option '{0}' is required."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOptionRequiresValue(object arg0) + { + const string Format = @"Option '{0}' requires a value"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOptionRunsDoNotSupportValues() + { + const string Format = @"Option runs do not support values"; + return Format; + } + + public static string ErrorOptionsAreMutuallyExclusive(object arg0) + { + const string Format = @"The following options are mutually exclusive. Only 1 may be defined at a time with respect to the others: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOptionsAreMutuallyInclusive(object arg0) + { + const string Format = @"The following options are mutually inclusive. If one or more are defined, then all must be defined: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOptionValueConverterNotFound(object arg0) + { + const string Format = @"Option value conversion failed. 
A value converter to handle converting arguments of type '{0}' was not found in the set of converters provided."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOptionValueNotAllowed(object arg0) + { + const string Format = @"Option '{0}' does not require or allow a value"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOptionValuesDoNotMatchExpected(object arg0, object arg1) + { + const string Format = @"The value for option {0} does not match any of the expected values: {1}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ErrorPositionalArgumentsNotAllowed() + { + const string Format = @"Positional arguments are not allowed"; + return Format; + } + + public static string ErrorRequiredOptionDoesNotExist(object arg0) + { + const string Format = @"Option '{0}' is a required option but was not provided"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorResponseFileNotFound(object arg0) + { + const string Format = @"Response file not found at path '{0}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorResponseFileOptionNotSupported() + { + const string Format = @"A response file option was provided, but the parser does not support the usage of response files."; + return Format; + } + + public static string ErrorValueCannotBeConvertedToEnum(object arg0, object arg1) + { + const string Format = @"The value '{0}' cannot be converted to a valid '{1}' enumeration value."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string OperationHandlerNotFound(object arg0) + { + const string Format = @"Operation handler not found for the set of arguments provided: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorInvalidValueConverterOrNoDefaultFound(object arg0) + { + const string Format = @"A valid value converter was not defined for the class member '{0}' option definition and no default value converter could be found. Define the Converter property on the option to supply the value converter."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOperationHandlerConstructorNotFound(object arg0) + { + const string Format = @"Operation handler creation failed. A valid constructor taking the parameters provided was not found on handler of type '{0}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOperationHandlerNotFound() + { + const string Format = @"Operation handler not found. An operation mode handler was not found for the arguments provided."; + return Format; + } + + public static string ErrorDuplicateDefaultOperationModeHandlerFound() + { + const string Format = @"Duplicate default operation handler found. A distinct operation handler could not be determined because no handler matched the mode provided on the command-line and more than 1 handler marked as default was found."; + return Format; + } + + public static string ErrorDuplicateOperationModeHandlerFound() + { + const string Format = @"Duplicate operation handler found. 
A distinct operation handler could not be determined because more than 1 matched the operation mode provided on the command-line."; + return Format; + } + + public static string ErrorInvalidValueConverterDataType(object arg0, object arg1) + { + const string Format = @"Invalid value converter data type. The type {0} is not a valid {1} implementation. Value converters must implement this interface."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ErrorMembersContainingPositionalsRequireCollection() + { + const string Format = @"Invalid backing field or property for positional arguments. Class members containing the values for positional arguments must be a collection type having an 'Add' method."; + return Format; + } + + public static string ErrorDuplicatePositionalOptionAttributes(object arg0) + { + const string Format = @"Duplicate {0} attribute definition. Only a single member (including inherited members) may be decorated with a {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOptionsAllowingMultiplesRequireCollection(object arg0) + { + const string Format = @"Invalid backing field or property for option '{0}'. Class members containing the values for options that allow multiples must be a collection type having an 'Add' method."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOptionNotFound(object arg0) + { + const string Format = @"Option not found or is case-sensitive: '{0}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ErrorOptionFlagRequiresBooleanMember(object arg0) + { + const string Format = @"Option '{0}' must have a boolean member type. Options that do not take arguments (i.e. used as flags, ex: /v /f) must have a System.Boolean member type. This member is set to true when the flag exists and false if not."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ContentIdCalculationBlockSizeError(object arg0) + { + const string Format = @"All blocks except the final block must be {0} bytes."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string BasicAuthenticationRequiresSsl() + { + const string Format = @"Basic authentication requires a secure connection to the server."; + return Format; + } + + public static string ValueOutOfRange(object arg0, object arg1, object arg2, object arg3) + { + const string Format = @"The value {0} is out of range of valid values for parameter {1}. 
Valid values must be between {2} and {3}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + + public static string OutOfRange(object arg0) + { + const string Format = @"The value {0} is outside of the allowed range."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ValueMustBeGreaterThanZero() + { + const string Format = @"The value must be greater than zero."; + return Format; + } + + public static string NullValueNecessary(object arg0) + { + const string Format = @"The value specified for the following variable must be null: {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string LowercaseStringRequired(object arg0) + { + const string Format = @"The string argument '{0}' must only consist of lowercase characters."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string UppercaseStringRequired(object arg0) + { + const string Format = @"The string argument '{0}' must only consist of uppercase characters."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string EmptyArrayNotAllowed() + { + const string Format = @"The array must contain at least one element."; + return Format; + } + + public static string EmptyOrWhiteSpaceStringNotAllowed() + { + const string Format = @"The string must have at least one non-white-space character."; + return Format; + } + + public static string StringLengthNotMatch(object arg0) + { + const string Format = @"Length of the string does not match with '{0}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string BothStringsCannotBeNull(object arg0, object arg1) + { + const string Format = @"One of the following values must not be null or String.Empty: {0}, {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string WhiteSpaceNotAllowed() + { + const string Format = @"The string cannot contain any whitespace characters."; + return Format; + } + + public static string UnexpectedType(object arg0, object arg1) + { + const string Format = @"Expecting '{0}' to be of type '{1}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string InvalidEmailAddressError() + { + const string Format = @"The supplied email address is invalid."; + return Format; + } + + public static string AbsoluteVirtualPathNotAllowed(object arg0) + { + const string Format = @"An absolute virtual path is not allowed. Remove the leading slash: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string UriUtility_AbsoluteUriRequired(object arg0) + { + const string Format = @"The following URL is not valid: {0}. You must specify an absolute path."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string UriUtility_RelativePathInvalid(object arg0) + { + const string Format = @"The following relative path is not valid: {0}. It must be both well formed and relative. It might be an absolute path."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string UriUtility_UriNotAllowed(object arg0) + { + const string Format = @"The following URL is not valid: {0}. 
It must begin with http or https."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string UriUtility_MustBeAuthorityOnlyUri(object arg0, object arg1) + { + const string Format = @"The following URL is not valid: {0}. Try removing any relative path information from the URL (for example, {1})."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string UrlNotValid() + { + const string Format = @"The URL that you specified is not valid. The URL must begin with either HTTP or HTTPS and be a valid address."; + return Format; + } + + public static string MalformedArtifactId(object arg0) + { + const string Format = @"The artifact is not understood by this application. Either the artifact supplied is invalid or the application doesn't have the required software updates. Artifact Id: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string MalformedUri(object arg0) + { + const string Format = @"Malformed Artifact URI: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string MalformedUrl(object arg0) + { + const string Format = @"Malformed Artifact URL: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string NullArtifactUrl() + { + const string Format = @"Null Artifact Url"; + return Format; + } + + public static string FailureGetArtifact() + { + const string Format = @"Unable to get artifacts from tool."; + return Format; + } + + public static string NullArtifactUriRoot() + { + const string Format = @"ArtifactUriRoot is Null"; + return Format; + } + + public static string UnknownTypeForSerialization(object arg0) + { + const string Format = @"Unknown object type '{0}' for serialization."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string StringContainsIllegalChars() + { + const string Format = @"The value contains characters that are not allowed (control characters, 0xFFFE, or 0xFFFF). 
Please remove those characters."; + return Format; + } + + public static string ValueEqualsToInfinity() + { + const string Format = @"The value must be a finite value."; + return Format; + } + + public static string SingleBitRequired(object arg0) + { + const string Format = @"The value {0} must contain a single bit flag."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidEnumArgument(object arg0, object arg1, object arg2) + { + const string Format = @"The value of argument '{0}' ({1}) is invalid for Enum type '{2}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string ConflictingPathSeparatorForVssFileStorage(object arg0, object arg1, object arg2) + { + const string Format = @"There is a conflict with the path separator character '{0}' requested for VssFileStorage at file path: {1} A previous instance was created with a path separator of '{2}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string ConflictingStringComparerForVssFileStorage(object arg0, object arg1, object arg2) + { + const string Format = @"There is a conflict with the string comparer '{0}' requested for VssFileStorage at file path: {1} A previous instance was created with a string comparer of '{2}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string InvalidClientStoragePath(object arg0, object arg1) + { + const string Format = @"The storage path specified is invalid: '{0}' This storage path cannot be null or empty. It should begin with the '{1}' path separator character, and have no empty path segments."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string CollectionSizeLimitExceeded(object arg0, object arg1) + { + const string Format = @"Collection '{0}' can have maximum '{1}' elements."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string DefaultValueNotAllowed(object arg0) + { + const string Format = @"The value {0} must not be set to the default."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string NullElementNotAllowedInCollection() + { + const string Format = @"Null elements are not allowed in the collection."; + return Format; + } + + public static string InvalidUriError(object arg0) + { + const string Format = @"Supplied URI is invalid. The URI should match {0} URI kind format."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string SubjectDescriptorEmpty(object arg0) + { + const string Format = @"The subject descriptor specified for parameter {0} must not be empty."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string EUIILeakException(object arg0) + { + const string Format = @"Event payload contains EUII. Message: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + } +} diff --git a/src/Sdk/Resources/ContentResources.g.cs b/src/Sdk/Resources/ContentResources.g.cs new file mode 100644 index 00000000000..d9c4970c98b --- /dev/null +++ b/src/Sdk/Resources/ContentResources.g.cs @@ -0,0 +1,20 @@ +using System.Globalization; + +namespace GitHub.Services.Content.Common +{ + public static class ContentResources + { + + public static string InvalidHexString(object arg0) + { + const string Format = @"Invalid hex string. 
The string value provided {0} is not a valid hex string."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ArtifactBillingException() + { + const string Format = @"Artifact cannot be uploaded because max quantity has been exceeded or the payment instrument is invalid."; + return Format; + } + } +} diff --git a/src/Sdk/Resources/ExpressionResources.g.cs b/src/Sdk/Resources/ExpressionResources.g.cs new file mode 100644 index 00000000000..b5af2c67a0e --- /dev/null +++ b/src/Sdk/Resources/ExpressionResources.g.cs @@ -0,0 +1,116 @@ +using System.Globalization; + +namespace GitHub.DistributedTask.Expressions +{ + public static class ExpressionResources + { + + public static string ExceededAllowedMemory(object arg0) + { + const string Format = @"The maximum allowed memory size was exceeded while evaluating the following expression: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ExceededMaxExpressionDepth(object arg0) + { + const string Format = @"Exceeded max expression depth {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ExceededMaxExpressionLength(object arg0) + { + const string Format = @"Exceeded max expression length {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ExpectedPropertyName() + { + const string Format = @"Expected a property name to follow the dereference operator '.'"; + return Format; + } + + public static string ExpectedStartParameter() + { + const string Format = @"Expected '(' to follow a function"; + return Format; + } + + public static string InvalidFormatArgIndex(object arg0) + { + const string Format = @"The following format string references more arguments than were supplied: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidFormatSpecifiers(object arg0, object arg1) + { + const string Format = @"The format specifiers '{0}' are not valid for objects of type '{1}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string InvalidFormatString(object arg0) + { + const string Format = @"The following format string is invalid: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string KeyNotFound(object arg0) + { + const string Format = @"Key not found '{0}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ParseErrorWithFwlink(object arg0) + { + const string Format = @"{0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ParseErrorWithTokenInfo(object arg0, object arg1, object arg2, object arg3) + { + const string Format = @"{0}: '{1}'. Located at position {2} within expression: {3}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + + public static string TypeCastError(object arg0, object arg1, object arg2) + { + const string Format = @"Unable to convert from {0} to {1}. 
Value: {2}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string TypeCastErrorNoValue(object arg0, object arg1) + { + const string Format = @"Unable to convert from {0} to {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string TypeCastErrorWithError(object arg0, object arg1, object arg2, object arg3) + { + const string Format = @"Unable to convert from {0} to {1}. Value: {2}. Error: {3}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + + public static string UnclosedFunction() + { + const string Format = @"Unclosed function"; + return Format; + } + + public static string UnclosedIndexer() + { + const string Format = @"Unclosed indexer"; + return Format; + } + + public static string UnexpectedSymbol() + { + const string Format = @"Unexpected symbol"; + return Format; + } + + public static string UnrecognizedValue() + { + const string Format = @"Unrecognized value"; + return Format; + } + } +} diff --git a/src/Sdk/Resources/FileContainerResources.g.cs b/src/Sdk/Resources/FileContainerResources.g.cs new file mode 100644 index 00000000000..0a058fb8d82 --- /dev/null +++ b/src/Sdk/Resources/FileContainerResources.g.cs @@ -0,0 +1,104 @@ +using System.Globalization; + +namespace GitHub.Services.WebApi +{ + public static class FileContainerResources + { + + public static string ArtifactUriNotSupportedException(object arg0) + { + const string Format = @"The artifact Uri {0} is not supported."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ContainerNotFoundException(object arg0) + { + const string Format = @"The container {0} could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ContainerItemNotFoundException(object arg0, object arg1) + { + const string Format = @"The item {0} in container {1} could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ContainerItemWithDifferentTypeExists(object arg0, object arg1) + { + const string Format = @"The items could not be created because an item with type {0} already exists at {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string PendingUploadNotFoundException(object arg0) + { + const string Format = @"The pending upload {0} could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ContainerItemDoesNotExist(object arg0, object arg1) + { + const string Format = @"The item {0} of type {1} could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ContainerItemCopySourcePendingUpload(object arg0) + { + const string Format = @"The source item {0} is in the pending upload state."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ContainerItemCopyTargetChildOfSource(object arg0, object arg1) + { + const string Format = @"The target folder {0} of the copy operation is a child of the source folder {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ContainerItemCopyDuplicateTargets(object arg0) + { + const string Format = @"The target location {0} is specified for two or more sources."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public 
static string ContainerAlreadyExists(object arg0) + { + const string Format = @"Container with artifact {0} already exists."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string UnexpectedContentType(object arg0, object arg1) + { + const string Format = @"Requested content type {0} but got back content type {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string NoContentReturned() + { + const string Format = @"The request returned no content."; + return Format; + } + + public static string GzipNotSupportedOnServer() + { + const string Format = @"The server does not support gzipped content."; + return Format; + } + + public static string BadCompression() + { + const string Format = @"The file length passed in is less than or equal to the compressed stream length."; + return Format; + } + + public static string ChunksizeWrongWithContentId(object arg0) + { + const string Format = @"The chunk size must be a multiple of {0} bytes when specifying a contentId."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ContentIdCollision(object arg0, object arg1, object arg2, object arg3) + { + const string Format = @"There was a contentId collision for file {0} with length {1} and file {2} with length {3}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + } +} diff --git a/src/Sdk/Resources/GraphResources.g.cs b/src/Sdk/Resources/GraphResources.g.cs new file mode 100644 index 00000000000..067dd653924 --- /dev/null +++ b/src/Sdk/Resources/GraphResources.g.cs @@ -0,0 +1,92 @@ +using System.Globalization; + +namespace GitHub.Services.WebApi +{ + public static class GraphResources + { + + public static string CannotEditChildrenOfNonGroup(object arg0) + { + const string Format = @"VS403339: Cannot add or remove child from graph subject with descriptor '{0}' because it is not a group."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string EmptySubjectDescriptorNotAllowed(object arg0) + { + const string Format = @"VS403350: The empty subject descriptor is not a valid value for parameter '{0}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string WellKnownSidNotAllowed(object arg0) + { + const string Format = @"VS403350: Well-known SIDs are not valid for the parameter '{0}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string GraphMembershipNotFound(object arg0, object arg1) + { + const string Format = @"VS403328: The graph membership for member descriptor '{0}' and container descriptor '{1}' could not be found. You may need to create this membership in the enclosing enterprise or organization."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string GraphSubjectNotFound(object arg0) + { + const string Format = @"VS403325: The graph subject with descriptor '{0}' could not be found. 
You may need to create the subject in the enclosing enterprise, or add organization-level memberships to make a subject in the enterprise visible in the enclosing organization"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidGraphLegacyDescriptor(object arg0) + { + const string Format = @"VS860018: The provided legacy descriptor '{0}' is not a valid legacy descriptor for this end point."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidGraphMemberCuid(object arg0) + { + const string Format = @"VS403323: Cannot find graph member storage key for cuid: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidGraphMemberStorageKey(object arg0) + { + const string Format = @"VS403324: Cannot find graph member cuid for storage key {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidGraphSubjectDescriptor(object arg0) + { + const string Format = @"VS860021: The provided descriptor '{0}' is not a valid graph subject descriptor for this end point."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string StorageKeyNotFound(object arg0) + { + const string Format = @"VS403369: The storage key for descriptor '{0}' could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string SubjectDescriptorNotFoundWithIdentityDescriptor(object arg0) + { + const string Format = @"VS403370: The subject descriptor for identity descriptor '{0}' could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string SubjectDescriptorNotFoundWithStorageKey(object arg0) + { + const string Format = @"VS403368: The subject descriptor for storage key '{0}' could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IdentifierLengthOutOfRange() + { + const string Format = @"Given identifier length is out of range of valid values."; + return Format; + } + + public static string SubjectTypeLengthOutOfRange() + { + const string Format = @"Given subject type length is out of range of valid values."; + return Format; + } + } +} diff --git a/src/Sdk/Resources/IdentityResources.g.cs b/src/Sdk/Resources/IdentityResources.g.cs new file mode 100644 index 00000000000..1d6bc1c4fea --- /dev/null +++ b/src/Sdk/Resources/IdentityResources.g.cs @@ -0,0 +1,400 @@ +using System.Globalization; + +namespace GitHub.Services.WebApi +{ + public static class IdentityResources + { + + public static string FieldReadOnly(object arg0) + { + const string Format = @"{0} is read-only."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string GROUPCREATIONERROR(object arg0, object arg1) + { + const string Format = @"A group named {0} already exists in scope {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ADDMEMBERCYCLICMEMBERSHIPERROR(object arg0, object arg1) + { + const string Format = @"A cyclic group containment error occurred when adding a group member. 
The group {1} already has the group {0} as a contained member."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string GROUPSCOPECREATIONERROR(object arg0) + { + const string Format = @"The group scope {0} already exists"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ADDMEMBERIDENTITYALREADYMEMBERERROR(object arg0, object arg1) + { + const string Format = @"The group {0} already has a member {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string REMOVEGROUPMEMBERNOTMEMBERERROR(object arg0) + { + const string Format = @"An error occurred removing the group member. There is no group member with the security identifier (SID) {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string REMOVEADMINGROUPERROR() + { + const string Format = @"This group cannot be removed. The existence of this Administrators group is required."; + return Format; + } + + public static string REMOVEEVERYONEGROUPERROR() + { + const string Format = @"This group cannot be removed. The existence of this Valid Users group is required."; + return Format; + } + + public static string REMOVESERVICEGROUPERROR() + { + const string Format = @"This group cannot be removed. The existence of this Service Accounts group is required."; + return Format; + } + + public static string REMOVESPECIALGROUPERROR() + { + const string Format = @"This group cannot be removed. The existence of this group is required."; + return Format; + } + + public static string FINDGROUPSIDDOESNOTEXISTERROR(object arg0) + { + const string Format = @"An error occurred finding the group. There is no group with the security identifier (SID) {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string GROUPRENAMEERROR(object arg0) + { + const string Format = @"Error renaming group, a group named {0} already exists."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string GROUPSCOPEDOESNOTEXISTERROR(object arg0) + { + const string Format = @"The identity scope {0} does not exist"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IdentityNotFoundMessage(object arg0) + { + const string Format = @"The identity with type '{0}' could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IdentityNotFoundWithDescriptor(object arg0, object arg1) + { + const string Format = @"The identity with type '{0}' and identifier '{1}' could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string IdentityNotFoundSimpleMessage() + { + const string Format = @"The identity could not be found."; + return Format; + } + + public static string IdentityNotFoundWithTfid(object arg0) + { + const string Format = @"The identity with TeamFoundationId {0} could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IdentityNotFoundWithName(object arg0) + { + const string Format = @"The identity with name {0} could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IdentityAccountNameAlreadyInUseError(object arg0) + { + const string Format = @"The identity account name '{0}' is already in use."; + return string.Format(CultureInfo.CurrentCulture, Format, 
arg0); + } + + public static string IdentityAccountNameCollisionRepairFailedError(object arg0) + { + const string Format = @"Support will be required to repair this account. An attempt to repair an account name collision for identity '{0}' failed and cannot be completed automatically."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IdentityAccountNameCollisionRepairUnsafeError(object arg0) + { + const string Format = @"Support will be required to repair this account. An attempt to repair an account name collision for identity '{0}' is unsafe and cannot be completed automatically."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IdentityAliasAlreadyInUseError(object arg0) + { + const string Format = @"The identity alias '{0}' is already in use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidNameNotRecognized(object arg0) + { + const string Format = @"You have specified a name, {0}, that contains character(s) that are not recognized. Specify a name that only contains characters that are supported by the database collation setting and try again."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IdentityMapReadOnlyException() + { + const string Format = @"The identity map cannot be accessed while the collection is detached."; + return Format; + } + + public static string IdentityAccountNamesAlreadyInUseError(object arg0, object arg1) + { + const string Format = @"{0} identity account names including '{1}' are already in use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string InvalidServiceIdentityName(object arg0) + { + const string Format = @"Service identities are limited to a maximum of 200 characters, and may only contain alpha numeric, dash, and space characters. The name '{0}' is not a valid service identity name."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string AccountPreferencesAlreadyExist() + { + const string Format = @"Organization preferences have already been set. You can only set the preferences for language, culture, and time zone when the organization is created, and these preferences cannot be changed."; + return Format; + } + + public static string ADDGROUPMEMBERILLEGALINTERNETIDENTITY(object arg0) + { + const string Format = @"Internet identities cannot be added to this server. Unable to add {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ADDGROUPMEMBERILLEGALWINDOWSIDENTITY(object arg0) + { + const string Format = @"Windows users cannot be added to this server. 
Unable to add {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ADDPROJECTGROUPTPROJECTMISMATCHERROR(object arg0, object arg1) + { + const string Format = @"Project group '{1}' cannot be added to group '{0}', it is from a different project."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string CANNOT_REMOVE_SERVICE_ACCOUNT() + { + const string Format = @"You cannot remove the service account from the Service Accounts group."; + return Format; + } + + public static string IDENTITYDOMAINDOESNOTEXISTERROR(object arg0) + { + const string Format = @"No identity domain exists with the following security identifier (SID): {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IDENTITYDOMAINMISMATCHERROR(object arg0, object arg1) + { + const string Format = @"The group that you wish to manage is not owned by service host {0}, it is owned by {1}. Please target your request at the correct host."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string IdentityProviderUnavailable(object arg0, object arg1) + { + const string Format = @"The identity provider for type {0}, identifier {1} is unavailable."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string IDENTITY_SYNC_ERROR(object arg0) + { + const string Format = @"Sync error for identity: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IllegalIdentityException(object arg0) + { + const string Format = @"The user or group name {0} contains unsupported characters, is empty, or too long."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string MODIFYEVERYONEGROUPEXCEPTION() + { + const string Format = @"The Valid Users group cannot be modified directly."; + return Format; + } + + public static string NOT_APPLICATION_GROUP() + { + const string Format = @"The identity you are attempting to edit is not an application group."; + return Format; + } + + public static string NOT_A_SECURITY_GROUP(object arg0) + { + const string Format = @"The group {0} is not a security group and cannot be added to Server."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string REMOVENONEXISTENTGROUPERROR(object arg0) + { + const string Format = @"An error occurred removing the group. There is no group with the security identifier (SID) {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string RemoveSelfFromAdminGroupError(object arg0) + { + const string Format = @"You cannot remove yourself from the Administrators group. This is a safeguard to prevent an enterprise locking themselves out of a deployment or project collection. Please have another administrator remove your membership. Alternatively you can disable the safeguard by setting {0} to false in the TF registry."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ADDPROJECTGROUPTOGLOBALGROUPERROR(object arg0, object arg1) + { + const string Format = @"You cannot add the project group {0} to the global group {1}. "; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string DynamicIdentityTypeCreationNotSupported() + { + const string Format = @"Dynamic creation of identity types is no longer supported. 
Please check that the type of the identity you are trying to create is supported. "; + return Format; + } + + public static string TooManyResultsError() + { + const string Format = @"The query was aborted because it returned too many results. Please apply additional filters to reduce the size of the resultset returned."; + return Format; + } + + public static string IncompatibleScopeError(object arg0, object arg1) + { + const string Format = @"Group cannot be created in the requested scope {1} since the requested scope is not within the root scope {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string InvalidIdentityIdTranslations() + { + const string Format = @"New translations have a record that may corrupt the existing translation data."; + return Format; + } + + public static string MultipleIdentitiesFoundError(object arg0, object arg1) + { + const string Format = @"Multiple identities found matching '{0}'. Please specify one of the following identities: + +{1}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string IdentityIdTranslationsAreMigrated() + { + const string Format = @"Identity id translations are migrated to collection partition."; + return Format; + } + + public static string InvalidGetDescriptorRequestWithLocalId(object arg0) + { + const string Format = @"Input parameter '{0}' is not a valid local id."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IdentityMaterializationFailedMessage(object arg0) + { + const string Format = @"Could not add user '{0}' at this time."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IdentityDescriptorNotFoundWithMasterId(object arg0) + { + const string Format = @"Identity descriptor for master id '{0}' not found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IdentityDescriptorNotFoundWithLocalId(object arg0) + { + const string Format = @"Identity descriptor for local id '{0}' not found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string TooManyRequestedItemsError() + { + const string Format = @"The request was aborted because it contained too many requested items."; + return Format; + } + + public static string TooManyRequestedItemsErrorWithCount(object arg0, object arg1) + { + const string Format = @"The request was aborted because it contained too many requested items {0}, maximum allowed is {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string InvalidIdentityKeyMaps() + { + const string Format = @"New identity key maps have a record that may corrupt the existing key map data."; + return Format; + } + + public static string InvitationPendingMessage(object arg0, object arg1) + { + const string Format = @"{0} has not accepted the invitation to the {1} organization."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ShouldBePersonalAccountMessage() + { + const string Format = @"Your work or school account does not have access to this resource, but your personal account does."; + return Format; + } + + public static string ShouldCreatePersonalAccountMessage() + { + const string Format = @"The account you are trying to access only allows Microsoft Accounts. 
Please create a Microsoft Account with a different email address and ask your administrator to invite the new Microsoft Account."; + return Format; + } + + public static string ShouldBeWorkAccountMessage() + { + const string Format = @"Your personal account does not have access to this resource, but your work or school account does."; + return Format; + } + + public static string IdentityNotFoundInCurrentDirectory() + { + const string Format = @"The identity could not be found in the current directory."; + return Format; + } + + public static string InvalidIdentityIdException(object arg0) + { + const string Format = @"The identity ID is invalid for identity: {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidIdentityDescriptorException(object arg0) + { + const string Format = @"The identity descriptor is invalid for identity: {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string RestoreGroupScopeValidationError(object arg0) + { + const string Format = @"Restore group scope validation error: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string AccountOwnerCannotBeRemovedFromGroup(object arg0) + { + const string Format = @"Current account owner is not allowed to be removed from {0} group. Please change the account owner and try again."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ProjectCollectionAdministrators() + { + const string Format = @"Project Collection Administrators"; + return Format; + } + } +} diff --git a/src/Sdk/Resources/JwtResources.g.cs b/src/Sdk/Resources/JwtResources.g.cs new file mode 100644 index 00000000000..b52e9383ff1 --- /dev/null +++ b/src/Sdk/Resources/JwtResources.g.cs @@ -0,0 +1,146 @@ +using System.Globalization; + +namespace GitHub.Services.WebApi +{ + public static class JwtResources + { + + public static string ActorValidationException() + { + const string Format = @"The ActorToken within the JsonWebToken is invalid."; + return Format; + } + + public static string DeserializationException() + { + const string Format = @"Failed to deserialize the JsonWebToken object."; + return Format; + } + + public static string DigestUnsupportedException(object arg0, object arg1) + { + const string Format = @"JsonWebTokens support only the {0} Digest, but the signing credentials specify {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string EncodedTokenDataMalformed() + { + const string Format = @"The encoded data in the JsonWebToken is malformed."; + return Format; + } + + public static string InvalidAudienceException() + { + const string Format = @"The audience of the token is invalid."; + return Format; + } + + public static string InvalidClockSkewException() + { + const string Format = @"The value supplied for ClockSkewInSeconds is invalid. 
It must be a positive integer.";
+            return Format;
+        }
+
+        public static string InvalidIssuerException()
+        {
+            const string Format = @"The issuer of the JsonWebToken is not valid.";
+            return Format;
+        }
+
+        public static string InvalidSignatureAlgorithm()
+        {
+            const string Format = @"The signature algorithm in the JsonWebToken header is invalid.";
+            return Format;
+        }
+
+        public static string InvalidValidFromValueException()
+        {
+            const string Format = @"The ValidFrom value is not valid.";
+            return Format;
+        }
+
+        public static string InvalidValidToValueException()
+        {
+            const string Format = @"The ValidTo value is not valid.";
+            return Format;
+        }
+
+        public static string ProviderTypeUnsupported(object arg0)
+        {
+            const string Format = @"JsonWebTokens do not support crypto provider of type {0}.";
+            return string.Format(CultureInfo.CurrentCulture, Format, arg0);
+        }
+
+        public static string SerializationException()
+        {
+            const string Format = @"Failed to serialize the JsonWebToken object.";
+            return Format;
+        }
+
+        public static string SignatureAlgorithmUnsupportedException(object arg0)
+        {
+            const string Format = @"JsonWebTokens do not support the supplied signature algorithm: {0}.";
+            return string.Format(CultureInfo.CurrentCulture, Format, arg0);
+        }
+
+        public static string SignatureNotFound()
+        {
+            const string Format = @"The JsonWebToken is not signed, or the signature has not been found.";
+            return Format;
+        }
+
+        public static string SignatureValidationException()
+        {
+            const string Format = @"The signature is not valid.";
+            return Format;
+        }
+
+        public static string SymmetricSecurityKeyNotFound()
+        {
+            const string Format = @"The supplied Signing Credential is not a SymmetricSigningCredential and does not match the Signature Algorithm.";
+            return Format;
+        }
+
+        public static string TokenExpiredException()
+        {
+            const string Format = @"The token is expired.";
+            return Format;
+        }
+
+        public static string TokenNotYetValidException()
+        {
+            const string Format = @"The token is not yet valid.";
+            return Format;
+        }
+
+        public static string ValidFromAfterValidToException()
+        {
+            const string Format = @"The time represented by the ValidFrom value comes after the time represented by the ValidTo value.";
+            return Format;
+        }
+
+        public static string SigningTokenExpired()
+        {
+            const string Format = @"The supplied signing token has expired.";
+            return Format;
+        }
+
+        public static string SigningTokenNoPrivateKey()
+        {
+            const string Format = @"The signing token has no private key and cannot be used for signing.";
+            return Format;
+        }
+
+        public static string SigningTokenKeyTooSmall()
+        {
+            const string Format = @"The key size of the supplied signing token is too small. It must be at least 2048 bits.";
+            return Format;
+        }
+
+        public static string TokenScopeNotAuthorizedException()
+        {
+            const string Format = @"The token scope is not valid.";
+            return Format;
+        }
+    }
+}
diff --git a/src/Sdk/Resources/LocationResources.g.cs b/src/Sdk/Resources/LocationResources.g.cs
new file mode 100644
index 00000000000..db35691f673
--- /dev/null
+++ b/src/Sdk/Resources/LocationResources.g.cs
@@ -0,0 +1,20 @@
+using System.Globalization;
+
+namespace GitHub.Services.WebApi
+{
+    public static class LocationResources
+    {
+
+        public static string CannotChangeParentDefinition(object arg0, object arg1, object arg2, object arg3)
+        {
+            const string Format = @"Cannot change parent definition. 
Service type {0}, identifier {1}, parent service type {2}, identifier {3}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + + public static string ParentDefinitionNotFound(object arg0, object arg1, object arg2, object arg3) + { + const string Format = @"Cannot save service definition with type {0} identifier {1} because parent definition with type {2} identifier {3} could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + } +} diff --git a/src/Sdk/Resources/PatchResources.g.cs b/src/Sdk/Resources/PatchResources.g.cs new file mode 100644 index 00000000000..36172c68c2c --- /dev/null +++ b/src/Sdk/Resources/PatchResources.g.cs @@ -0,0 +1,122 @@ +using System.Globalization; + +namespace GitHub.Services.WebApi +{ + public static class PatchResources + { + + public static string CannotReplaceNonExistantValue(object arg0) + { + const string Format = @"Attempted to replace a value that does not exist at path {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string IndexOutOfRange(object arg0) + { + const string Format = @"Index out of range for path {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InsertNotSupported(object arg0) + { + const string Format = @"{0} does not support insert."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidOperation() + { + const string Format = @"Unrecognized operation type."; + return Format; + } + + public static string InvalidValue(object arg0, object arg1) + { + const string Format = @"Value {0} does not match the expected type {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string MoveCopyNotImplemented() + { + const string Format = @"Move/Copy is not implemented."; + return Format; + } + + public static string NullOrEmptyOperations() + { + const string Format = @"At least one operation is required for Apply."; + return Format; + } + + public static string PathCannotBeNull() + { + const string Format = @"Path cannot be null."; + return Format; + } + + public static string PathInvalidEndValue() + { + const string Format = @"Path cannot end with /."; + return Format; + } + + public static string PathInvalidStartValue() + { + const string Format = @"Path is required to start with a / or be """"."; + return Format; + } + + public static string TargetCannotBeNull() + { + const string Format = @"Evaluated target should not be null."; + return Format; + } + + public static string TestFailed(object arg0, object arg1, object arg2) + { + const string Format = @"Test Operation for path {0} failed, value {1} was not equal to test value {2}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string TestNotImplementedForDictionary() + { + const string Format = @"Test is not implemented for Dictionary."; + return Format; + } + + public static string TestNotImplementedForList() + { + const string Format = @"Test is not implemented for List."; + return Format; + } + + public static string UnableToEvaluatePath(object arg0) + { + const string Format = @"Unable to evaluate path {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ValueCannotBeNull() + { + const string Format = @"Value cannot be null."; + return Format; + } + + public static string ValueNotNull() + { + const string Format 
= @"Remove requires Value to be null."; + return Format; + } + + public static string JsonPatchNull() + { + const string Format = @"You must pass a valid patch document in the body of the request."; + return Format; + } + + public static string InvalidFieldName(object arg0) + { + const string Format = @"Replace requires {0} to have existing value. Try Add operation instead."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + } +} diff --git a/src/Sdk/Resources/PipelineStrings.g.cs b/src/Sdk/Resources/PipelineStrings.g.cs new file mode 100644 index 00000000000..6701d9c313f --- /dev/null +++ b/src/Sdk/Resources/PipelineStrings.g.cs @@ -0,0 +1,500 @@ +using System.Globalization; + +namespace GitHub.DistributedTask.Pipelines +{ + public static class PipelineStrings + { + + public static string AmbiguousQueueSpecification(object arg0) + { + const string Format = @"The pool name {0} is ambiguous."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string AmbiguousSecureFileSpecification(object arg0) + { + const string Format = @"The secure file name {0} is ambiguous."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string AmbiguousServiceEndpointSpecification(object arg0) + { + const string Format = @"The service connection name {0} is ambiguous."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string AmbiguousTaskSpecification(object arg0, object arg1) + { + const string Format = @"The task name {0} is ambiguous. Specify one of the following identifiers to resolve the ambiguity: {1}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string AmbiguousVariableGroupSpecification(object arg0) + { + const string Format = @"The variable group name {0} is ambiguous."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string AzureKeyVaultTaskName(object arg0) + { + const string Format = @"Download secrets: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ContainerResourceInvalidRegistryEndpointType(object arg0, object arg1, object arg2) + { + const string Format = @"Expected 'dockerregistry' service connection type for image registry referenced by {0}, but got {1} for service connection {2}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string ContainerResourceNotFound(object arg0) + { + const string Format = @"A container resource with name {0} could not be found. The container resource does not exist. If you intended to specify an image, use NAME:TAG or NAME@DIGEST. For example, ubuntu:latest"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ContainerEndpointNotFound(object arg0, object arg1) + { + const string Format = @"Container {0} references service connection {1} which does not exist or is not authorized for use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string CheckoutMultipleRepositoryNotSupported() + { + const string Format = @"Checkout of multiple repositories is not supported."; + return Format; + } + + public static string CheckoutStepRepositoryNotSupported(object arg0) + { + const string Format = @"Checkout of repository '{0}' is not supported. 
Only 'self' and 'none' are supported."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string CheckoutMustBeTheFirstStep() + { + const string Format = @"Checkout should be the first step in the job."; + return Format; + } + + public static string ExpressionInvalid(object arg0) + { + const string Format = @"'{0}' is not a valid expression. Expressions must be enclosed with '$[' and ']'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string DemandExpansionInvalid(object arg0, object arg1, object arg2) + { + const string Format = @"Demand '{0}' is not valid when '{1}' evaluates to '{2}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string PhaseGraphCycleDetected(object arg0, object arg1) + { + const string Format = @"Job {0} depends on job {1} which creates a cycle in the dependency graph."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string StageGraphCycleDetected(object arg0, object arg1) + { + const string Format = @"Stage {0} depends on stage {1} which creates a cycle in the dependency graph."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string StagePhaseGraphCycleDetected(object arg0, object arg1, object arg2) + { + const string Format = @"Stage {0} job {1} depends on job {2} which creates a cycle in the dependency graph."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string InvalidRegexOptions(object arg0, object arg1) + { + const string Format = @"Provider regex options '{0}' are invalid. Supported combination of flags: `{1}`. Eg: 'IgnoreCase, Multiline', 'Multiline'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string InvalidRetryStageNeverRun(object arg0) + { + const string Format = @"Unable to retry stage {0} because it has never been run."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidRetryStageNotComplete(object arg0) + { + const string Format = @"Unable to retry the pipeline because stage {0} is currently in progress."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidTypeForLengthFunction(object arg0) + { + const string Format = @"Kind '{0}' not supported. 
Only arrays, strings, dictionaries, or collections are supported for the length function."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidValidationOptionNoImplementation(object arg0, object arg1) + { + const string Format = @"The validation option {0} was specified but no implementation was provided for {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string PhaseDependencyNotFound(object arg0, object arg1) + { + const string Format = @"Job {0} depends on unknown job {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string StagePhaseDependencyNotFound(object arg0, object arg1, object arg2) + { + const string Format = @"Stage {0} job {1} depends on unknown job {2}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string StageDependencyNotFound(object arg0, object arg1) + { + const string Format = @"Stage {0} depends on unknown stage {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string PhaseJobNameInvalidForSlicing(object arg0) + { + const string Format = @"The job name {0} is not valid for the specified execution options. Valid jobs names include JobN or N, where N is a value from 1 to maximum parallelism."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string PhaseJobNumberDoesNotExist(object arg0, object arg1, object arg2) + { + const string Format = @"Job {0} uses a maximum parallelism of {1}. The job {2} does not exist with the specified parallelism settings."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string PhaseJobMatrixExpansionExceedLimit(object arg0, object arg1) + { + const string Format = @"The matrix expansion resulted in {0} jobs which exceeds the maximum allowable job count of {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string PhaseJobSlicingExpansionExceedLimit(object arg0, object arg1) + { + const string Format = @"The slicing expansion resulted in {0} jobs which exceeds the maximum allowable job count of {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string PhaseMatrixConfigurationDoesNotExist(object arg0, object arg1) + { + const string Format = @"Job {0} does not specify a matrix configuration named {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string PhaseNameInvalid(object arg0) + { + const string Format = @"Job {0} has an invalid name. Valid names may only contain alphanumeric characters and '_' and may not start with a number."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string StageNameInvalid(object arg0) + { + const string Format = @"Stage {0} has an invalid name. Valid names may only contain alphanumeric characters and '_' and may not start with a number."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string StagePhaseNameInvalid(object arg0, object arg1) + { + const string Format = @"Stage {0} job {1} has an invalid name. 
Valid names may only contain alphanumeric characters and '_' and may not start with a number."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string PhaseNamesMustBeUnique(object arg0) + { + const string Format = @"The job name {0} appears more than once. Job names must be unique within a pipeline."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string StagePhaseNamesMustBeUnique(object arg0, object arg1) + { + const string Format = @"Stage {0} job {1} appears more than once. Job names must be unique within a stage."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string PhaseTargetRequired(object arg0) + { + const string Format = @"Job {0}: Target is required."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string StageVariableGroupNotSupported(object arg0, object arg1) + { + const string Format = @"Stage {0}: Variable group reference {1} is not supported."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string PhaseVariableGroupNotSupported(object arg0, object arg1) + { + const string Format = @"Job {0}: Variable group reference {1} is not supported."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string PipelineNotValid() + { + const string Format = @"The pipeline is not valid."; + return Format; + } + + public static string PipelineNotValidWithErrors(object arg0) + { + const string Format = @"The pipeline is not valid. {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string PipelineNotValidNoStartingPhase() + { + const string Format = @"The pipeline must contain at least one job with no dependencies."; + return Format; + } + + public static string PipelineNotValidNoStartingStage() + { + const string Format = @"The pipeline must contain at least one stage with no dependencies."; + return Format; + } + + public static string StageNotValidNoStartingPhase(object arg0) + { + const string Format = @"Stage {0} must contain at least one job with no dependencies."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string QueueNotDefined() + { + const string Format = @"Either a pool ID or name is required."; + return Format; + } + + public static string QueueNotFound(object arg0) + { + const string Format = @"Could not find a pool with ID {0}. The pool does not exist or has not been authorized for use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string QueueNotFoundByName(object arg0) + { + const string Format = @"Could not find a pool with name {0}. The pool does not exist or has not been authorized for use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string RegexFailed(object arg0, object arg1) + { + const string Format = @"Regular expression failed evaluating '{0}' : {1}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string SecureFileNotFound(object arg0) + { + const string Format = @"A secure file with name {0} could not be found. 
The secure file does not exist or has not been authorized for use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string SecureFileNotFoundForInput(object arg0, object arg1, object arg2, object arg3) + { + const string Format = @"Job {0}: Step {1} input {2} references secure file {3} which could not be found. The secure file does not exist or has not been authorized for use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + + public static string ServiceEndpointNotFound(object arg0) + { + const string Format = @"A service connection with name {0} could not be found. The service connection does not exist or has not been authorized for use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ServiceEndpointNotFoundForInput(object arg0, object arg1, object arg2, object arg3) + { + const string Format = @"Job {0}: Step {1} input {2} references service connection {3} which could not be found. The service connection does not exist or has not been authorized for use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + + public static string StepConditionIsNotValid(object arg0, object arg1, object arg2, object arg3) + { + const string Format = @"Job {0}: Step {1} specifies condition {2} which is not valid. Reason: {3}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + + public static string StepInputEndpointAuthSchemeMismatch(object arg0, object arg1, object arg2, object arg3, object arg4, object arg5, object arg6, object arg7) + { + const string Format = @"Job {0}: Step {1} input {2} expects a service connection of type {3} with authentication scheme {4} but the provided service connection {5} is of type {6} using authentication scheme {7}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7); + } + + public static string StepInputEndpointTypeMismatch(object arg0, object arg1, object arg2, object arg3, object arg4, object arg5) + { + const string Format = @"Job {0}: Step {1} input {2} expects a service connection of type {3} but the provided service connection {4} is of type {5}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3, arg4, arg5); + } + + public static string StepNameInvalid(object arg0, object arg1) + { + const string Format = @"Job {0}: Step {1} has an invalid name. Valid names may only contain alphanumeric characters and '_' and may not start with a number."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string StepNamesMustBeUnique(object arg0, object arg1) + { + const string Format = @"Job {0}: The step name {1} appears more than once. 
Step names must be unique within a job."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string StepNotSupported() + { + const string Format = @"Only task steps and group steps are supported."; + return Format; + } + + public static string StepTaskInputInvalid(object arg0, object arg1, object arg2, object arg3, object arg4, object arg5) + { + const string Format = @"Job {0}: Step {1} input '{2}' with value '{3}' does not satisfy '{4}': {5}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3, arg4, arg5); + } + + public static string StepTaskReferenceInvalid(object arg0, object arg1) + { + const string Format = @"Job {0}: Step {1} has an invalid task definition reference. A valid task definition reference must specify either an ID or a name and a version specification with a major version specified."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string StepActionReferenceInvalid(object arg0, object arg1) + { + const string Format = @"Job {0}: Step {1} has an invalid action definition reference. A valid action definition reference can be either a container resource or a repository resource."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string TaskInvalidForGivenTarget(object arg0, object arg1, object arg2, object arg3) + { + const string Format = @"Job {0}: Step {1} references task '{2}' at version '{3}' which is not valid for the given job target."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + + public static string TaskMissing(object arg0, object arg1, object arg2, object arg3) + { + const string Format = @"A task is missing. The pipeline references a task called '{2}'. This usually indicates the task isn't installed, and you may be able to install it from the Marketplace: https://marketplace.visualstudio.com. (Task version {3}, job '{0}', step '{1}'.)"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + + public static string TaskStepReferenceInvalid(object arg0, object arg1, object arg2) + { + const string Format = @"Job {0}: Step {1} task reference is invalid. {2}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string ActionStepReferenceInvalid(object arg0, object arg1, object arg2) + { + const string Format = @"Job {0}: Step {1} action reference is invalid. 
{2}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string TaskTemplateNotSupported(object arg0, object arg1) + { + const string Format = @"Task template {0} at version {1} is not supported."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string TemplateStoreNotProvided(object arg0, object arg1) + { + const string Format = @"Unable to resolve task template {0} because no implementation was provided for {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string UnsupportedTargetType(object arg0, object arg1) + { + const string Format = @"Job {0}: Target {1} is not supported."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string RepositoryNotSpecified() + { + const string Format = @"The checkout step does not specify a repository"; + return Format; + } + + public static string RepositoryResourceNotFound(object arg0) + { + const string Format = @"The checkout step references the repository '{0}' which is not defined by the pipeline"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string VariableGroupNotFound(object arg0) + { + const string Format = @"Variable group {0} was not found or is not authorized for use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string VariableGroupNotFoundForPhase(object arg0, object arg1) + { + const string Format = @"Job {0}: Variable group {1} could not be found. The variable group does not exist or has not been authorized for use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string VariableGroupNotFoundForStage(object arg0, object arg1) + { + const string Format = @"Stage {0}: Variable group {1} could not be found. The variable group does not exist or has not been authorized for use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string JobNameWhenNoNameIsProvided(object arg0) + { + const string Format = @"Job{0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string StageNameWhenNoNameIsProvided(object arg0) + { + const string Format = @"Stage{0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string InvalidAbsoluteRollingValue() + { + const string Format = @"Absolute rolling value should be greater than zero."; + return Format; + } + + public static string InvalidPercentageRollingValue() + { + const string Format = @"Percentage rolling value should be with in 1 to 100."; + return Format; + } + + public static string InvalidRollingOption(object arg0) + { + const string Format = @"{0} is not supported as rolling option."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string EnvironmentNotFound(object arg0, object arg1) + { + const string Format = @"Job {0}: Environment {1} could not be found. 
The environment does not exist or has not been authorized for use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string EnvironmentRequired(object arg0) + { + const string Format = @"Job {0}: Environment is required."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string EnvironmentResourceNotFound(object arg0, object arg1, object arg2) + { + const string Format = @"Job {0}: Resource {1} does not exist in environment {2}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string StageNamesMustBeUnique(object arg0) + { + const string Format = @"The stage name {0} appears more than once. Stage names must be unique within a pipeline."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ServiceConnectionUsedInVariableGroupNotValid(object arg0, object arg1) + { + const string Format = @"Service connection : {0} used in variable group : {1} is not valid. Either service connection does not exist or has not been authorized for use."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + } +} diff --git a/src/Sdk/Resources/SecurityResources.g.cs b/src/Sdk/Resources/SecurityResources.g.cs new file mode 100644 index 00000000000..e989ee8a67e --- /dev/null +++ b/src/Sdk/Resources/SecurityResources.g.cs @@ -0,0 +1,20 @@ +using System.Globalization; + +namespace GitHub.Services.WebApi +{ + public static class SecurityResources + { + + public static string InvalidAclStoreException(object arg0, object arg1) + { + const string Format = @"The ACL store with identifier '{1}' was not found in the security namespace '{0}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string InvalidPermissionsException(object arg0, object arg1) + { + const string Format = @"Invalid operation. Unable to set bits '{1}' in security namespace '{0}' as it is reserved by the system."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + } +} diff --git a/src/Sdk/Resources/TemplateStrings.g.cs b/src/Sdk/Resources/TemplateStrings.g.cs new file mode 100644 index 00000000000..43800c0e41c --- /dev/null +++ b/src/Sdk/Resources/TemplateStrings.g.cs @@ -0,0 +1,140 @@ +using System.Globalization; + +namespace GitHub.DistributedTask.ObjectTemplating +{ + public static class TemplateStrings + { + + public static string DirectiveNotAllowed(object arg0) + { + const string Format = @"The expression directive '{0}' is not supported in this context"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string DirectiveNotAllowedInline(object arg0) + { + const string Format = @"The directive '{0}' is not allowed in this context. Directives are not supported for expressions that are embedded within a string. Directives are only supported when the entire value is an expression."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ExpectedExpression() + { + const string Format = @"An expression was expected"; + return Format; + } + + public static string ExpectedMapping() + { + const string Format = @"Expected a mapping"; + return Format; + } + + public static string ExpectedNParametersFollowingDirective(object arg0, object arg1, object arg2) + { + const string Format = @"Exactly {0} parameter(s) were expected following the directive '{1}'. 
Actual parameter count: {2}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string ExpectedScalar() + { + const string Format = @"Expected a scalar value"; + return Format; + } + + public static string ExpectedScalarSequenceOrMapping() + { + const string Format = @"Expected a scalar value, a sequence, or a mapping"; + return Format; + } + + public static string ExpectedSequence() + { + const string Format = @"Expected a sequence"; + return Format; + } + + public static string ExpressionNotAllowed() + { + const string Format = @"A template expression is not allowed in this context"; + return Format; + } + + public static string ExpressionNotClosed() + { + const string Format = @"The expression is not closed. An unescaped ${{ sequence was found, but the closing }} sequence was not found."; + return Format; + } + + public static string LineColumn(object arg0, object arg1) + { + const string Format = @"(Line: {0}, Col: {1})"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string MaxObjectDepthExceeded() + { + const string Format = @"Maximum object depth exceeded"; + return Format; + } + + public static string MaxObjectSizeExceeded() + { + const string Format = @"Maximum object size exceeded"; + return Format; + } + + public static string MaxTemplateEventsExceeded() + { + const string Format = @"Maximum events exceeded while evaluating the template. This may indicate an infinite loop or too many nested loops."; + return Format; + } + + public static string TemplateNotValid() + { + const string Format = @"The template is not valid."; + return Format; + } + + public static string TemplateNotValidWithErrors(object arg0) + { + const string Format = @"The template is not valid. {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string UnableToConvertToTemplateToken(object arg0) + { + const string Format = @"Unable to convert the object to a template token. Actual type '{0}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string UnableToDetermineOneOf(object arg0) + { + const string Format = @"There's not enough info to determine what you meant. 
Add one of these properties: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string UnexpectedMappingStart() + { + const string Format = @"A mapping was not expected"; + return Format; + } + + public static string UnexpectedSequenceStart() + { + const string Format = @"A sequence was not expected"; + return Format; + } + + public static string UnexpectedValue(object arg0) + { + const string Format = @"Unexpected value '{0}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ValueAlreadyDefined(object arg0) + { + const string Format = @"'{0}' is already defined"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + } +} diff --git a/src/Sdk/Resources/WebApiResources.g.cs b/src/Sdk/Resources/WebApiResources.g.cs new file mode 100644 index 00000000000..f7a4735cba0 --- /dev/null +++ b/src/Sdk/Resources/WebApiResources.g.cs @@ -0,0 +1,392 @@ +using System.Globalization; + +namespace GitHub.Services.WebApi +{ + public static class WebApiResources + { + + public static string UnsupportedContentType(object arg0) + { + const string Format = @"The server returns content type {0}, which is not supported."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string DownloadCorrupted() + { + const string Format = @"The download file is corrupted. Get the file again."; + return Format; + } + + public static string SerializingPhrase() + { + const string Format = @"being serialized"; + return Format; + } + + public static string DeserializationCorrupt() + { + const string Format = @"The data presented for deserialization to the PropertiesCollection is corrupt."; + return Format; + } + + public static string ClientResourceVersionNotSupported(object arg0, object arg1, object arg2, object arg3) + { + const string Format = @"The server does not support resource {0} at API version {1}. The minimum supported version on {2} is {3}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2, arg3); + } + + public static string ResourceNotFoundOnServerMessage(object arg0, object arg1) + { + const string Format = @"API resource location {0} is not registered on {1}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ResourceNotRegisteredMessage(object arg0) + { + const string Format = @"API resource location {0} is not registered on this server."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ContainerIdMustBeGreaterThanZero() + { + const string Format = @"The container ID must be greater than zero."; + return Format; + } + + public static string FullyQualifiedLocationParameter() + { + const string Format = @"The value of the location parameter cannot be null if the RelativeToSetting is 'FullyQualified'"; + return Format; + } + + public static string RelativeLocationMappingErrorMessage() + { + const string Format = @"You cannot add location mappings to service definitions that are not part of the FullyQualified type."; + return Format; + } + + public static string InvalidAccessMappingLocationServiceUrl() + { + const string Format = @"The access mapping is not valid and cannot be registered. 
The location service URL cannot be null or empty."; + return Format; + } + + public static string ServiceDefinitionDoesNotExist(object arg0, object arg1) + { + const string Format = @"The service definition with service type '{0}' and identifier '{1}' does not exist."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ServiceDefinitionWithNoLocations(object arg0) + { + const string Format = @"The service with the following type does not have a location mapping: {0}. You must provide at least one location in order to configure locations for an external service."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string JsonParseError(object arg0) + { + const string Format = @"Unable to parse JSON in: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string MissingRequiredParameterMessage(object arg0) + { + const string Format = @"A required parameter {0} was not specified for this request."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ProxyAuthenticationRequired() + { + const string Format = @"SP324097: Your network proxy requires authentication."; + return Format; + } + + public static string InvalidApiVersionStringMessage(object arg0) + { + const string Format = @"Invalid api version string: ""{0}"". Api version string must be in the format: {{Major}}.{{Minor}}[-preview[.{{ResourceVersion}}]]."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ApiResourceDuplicateIdMessage(object arg0) + { + const string Format = @"The following location id has already been registered: {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ApiResourceDuplicateRouteNameMessage(object arg0) + { + const string Format = @"The following route name has already been registered: {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string RequestContentTypeNotSupported(object arg0, object arg1, object arg2) + { + const string Format = @"The request indicated a Content-Type of ""{0}"" for method type ""{1}"" which is not supported. Valid content types for this method are: {2}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string InvalidReferenceLinkFormat() + { + const string Format = @"ReferenceLinks is a dictionary that contains either a single ReferenceLink or an array of ReferenceLinks."; + return Format; + } + + public static string PreviewVersionNotSuppliedMessage(object arg0) + { + const string Format = @"The requested version ""{0}"" of the resource is under preview. The -preview flag must be supplied in the api-version for such requests. For example: ""{0}-preview"""; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string VersionNotSuppliedMessage(object arg0) + { + const string Format = @"No api-version was supplied for the ""{0}"" request. The version must be supplied either as part of the Accept header (e.g. ""application/json; api-version=1.0"") or as a query parameter (e.g. 
""?api-version=1.0"")."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string MustacheTemplateInvalidEndBlock(object arg0) + { + const string Format = @"Unexpected end block '{0}' before any start block"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string MustacheTemplateMissingBlockHelper(object arg0, object arg1) + { + const string Format = @"Block Helper '{0}' not found for expression '{1}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string MustacheTemplateMissingHelper(object arg0, object arg1) + { + const string Format = @"Helper '{0}' not found for expression '{1}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string MustacheTemplateNonMatchingEndBlock(object arg0, object arg1) + { + const string Format = @"End block '{0}' does not match start block '{1}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string MustacheTemplateBraceCountMismatch(object arg0) + { + const string Format = @"The expression '{0}' is invalid due to mismatching start and end brace count."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string MustacheTemplateInvalidEndBraces(object arg0, object arg1) + { + const string Format = @"Invalid end braces before start braces at position '{0}' of template '{1}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string MustacheTemplateInvalidStartBraces(object arg0, object arg1, object arg2) + { + const string Format = @"Invalid start braces within template expression '{0}' at position {1} of template '{2}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1, arg2); + } + + public static string MustacheTemplateInvalidEscapedStringLiteral(object arg0, object arg1) + { + const string Format = @"Invalid escape character in string literal '{0}' within template expression '{1}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string MustacheTemplateUnterminatedStringLiteral(object arg0, object arg1) + { + const string Format = @"Unterminated string literal '{0}' within template expression '{1}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string MustacheTemplateInvalidNumericLiteral(object arg0, object arg1) + { + const string Format = @"Invalid numeric literal '{0}' within template expression '{1}'"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string OperationNotFoundException(object arg0) + { + const string Format = @"Failed to find operation '{0}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string OperationPluginNotFoundException(object arg0) + { + const string Format = @"Failed to find operation plugin '{0}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string OperationPluginWithSameIdException(object arg0) + { + const string Format = @"Found several plugins for the id '{0}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string OperationPluginNoPermission(object arg0, object arg1) + { + const string Format = @"The operation '{1}' for the plugin '{0}' doesn't have permission."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + 
+ public static string OperationUpdateException(object arg0) + { + const string Format = @"Operation update for operation '{0}' did not complete successfully."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string CollectionDoesNotExistException(object arg0) + { + const string Format = @"Collection with name {0} does not exist."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string MissingCloseInlineMessage() + { + const string Format = @"Missing close expression for inline content."; + return Format; + } + + public static string MissingEndingBracesMessage(object arg0) + { + const string Format = @"No ending braces for expression '{0}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string NestedInlinePartialsMessage() + { + const string Format = @"An inline partial cannot contain another inline partial"; + return Format; + } + + public static string GetServiceArgumentError(object arg0) + { + const string Format = @"'{0}' must be a non-abstract class with a public parameterless or default constructor in order to use it as parameter 'T' in GetService()."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ExtensibleServiceTypeNotRegistered(object arg0) + { + const string Format = @"The service type '{0}' does not have a registered implementation or default implementation attribute."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ExtensibleServiceTypeNotValid(object arg0, object arg1) + { + const string Format = @"'{1}' does not extend or implement the service type '{0}'."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1); + } + + public static string ServerDataProviderNotFound(object arg0) + { + const string Format = @"The server data provider for service owner {0} could not be found."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ClientCertificateMissing(object arg0) + { + const string Format = @"No certificate capable of client authentication was found in the certificate store with thumbprint {0}."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string SmartCardMissing(object arg0) + { + const string Format = @"The smart card containing the private key for the certificate with thumbprint {0} is not available."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ClientCertificateNoPermission(object arg0) + { + const string Format = @"The certificate with thumbprint {0} could not be used for client authentication. The current user may not have permission to use the certificate."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string ClientCertificateErrorReadingStore(object arg0) + { + const string Format = @"An exception occurred while loading client authentication certificates from the certificate store: {0}"; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + + public static string CannotAuthenticateAsAnotherUser(object arg0, object arg1) + { + const string Format = @"We were unable to establish the connection because it is configured for user {0} but you attempted to connect using user {1}. To connect as a different user perform a switch user operation. 
To connect with the configured identity just attempt the last operation again.";
+            return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1);
+        }
+
+        public static string MustacheTemplateInvalidPartialReference(object arg0)
+        {
+            const string Format = @"Invalid partial reference: {0}";
+            return string.Format(CultureInfo.CurrentCulture, Format, arg0);
+        }
+
+        public static string CannotGetUnattributedClient(object arg0)
+        {
+            const string Format = @"The current VssConnection does not support calling GetClient for this client type: '{0}'. Instead, use the GetClient overload which accepts a serviceIdentifier parameter to specify the intended target service for the given client.";
+            return string.Format(CultureInfo.CurrentCulture, Format, arg0);
+        }
+
+        public static string UnknownEntityType(object arg0)
+        {
+            const string Format = @"Unknown entityType {0}. Cannot parse.";
+            return string.Format(CultureInfo.CurrentCulture, Format, arg0);
+        }
+
+        public static string GraphGroupMissingRequiredFields()
+        {
+            const string Format = @"Must have exactly one of originId, principalName or displayName set.";
+            return Format;
+        }
+
+        public static string GraphUserMissingRequiredFields()
+        {
+            const string Format = @"Must have exactly one of originId or principalName set.";
+            return Format;
+        }
+
+        public static string MustacheEvaluationResultLengthExceeded(object arg0)
+        {
+            const string Format = @"The maximum evaluation result length has been exceeded. The maximum allowed length is {0:N0} characters.";
+            return string.Format(CultureInfo.CurrentCulture, Format, arg0);
+        }
+
+        public static string MustacheTemplateInlinePartialsNotAllowed()
+        {
+            const string Format = @"Inline partial expressions are not allowed";
+            return Format;
+        }
+
+        public static string MustacheTemplateMaxDepthExceeded(object arg0)
+        {
+            const string Format = @"The maximum expression depth has been exceeded. The maximum allowed expression depth is {0}.";
+            return string.Format(CultureInfo.CurrentCulture, Format, arg0);
+        }
+
+        public static string UnexpectedTokenType()
+        {
+            const string Format = @"Unexpected token type. Only JObject, JArrays, Guid, String and Boolean are supported.";
+            return Format;
+        }
+
+        public static string ApiVersionOutOfRange(object arg0, object arg1)
+        {
+            const string Format = @"The requested REST API version of {0} is out of range for this server. The latest REST API version this server supports is {1}.";
+            return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1);
+        }
+
+        public static string ApiVersionOutOfRangeForRoute(object arg0, object arg1)
+        {
+            const string Format = @"The request matched route {1}, but the requested REST API version {0} was outside the valid version range for this route.";
+            return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1);
+        }
+
+        public static string ApiVersionOutOfRangeForRoutes(object arg0, object arg1)
+        {
+            const string Format = @"The following routes matched, but the requested REST API version {0} was outside the valid version ranges: {1}";
+            return string.Format(CultureInfo.CurrentCulture, Format, arg0, arg1);
+        }
+
+        public static string UnsafeCrossOriginRequest(object arg0)
+        {
+            const string Format = @"A cross-origin request from origin ""{0}"" is not allowed when using cookie-based authentication. 
An authentication token needs to be provided in the Authorization header of the request."; + return string.Format(CultureInfo.CurrentCulture, Format, arg0); + } + } +} diff --git a/src/Sdk/Sdk.csproj b/src/Sdk/Sdk.csproj new file mode 100644 index 00000000000..13323616ce4 --- /dev/null +++ b/src/Sdk/Sdk.csproj @@ -0,0 +1,38 @@ + + + + netcoreapp2.2 + Library + win-x64;win-x86;linux-x64;linux-arm;rhel.6-x64;osx-x64 + true + portable-net45+win8 + NU1701;NU1603 + $(Version) + NETSTANDARD;NET_STANDARD;TRACE + 7.3 + true + + + + + + + + + + + + + + + + + + + + + + GitHub.DistributedTask.Pipelines.ObjectTemplating.workflow-v1.0.json + + + diff --git a/src/Sdk/WebApi/WebApi/Attributes.cs b/src/Sdk/WebApi/WebApi/Attributes.cs new file mode 100644 index 00000000000..375d8debe65 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Attributes.cs @@ -0,0 +1,177 @@ +using System; +using GitHub.Services.WebApi.Internal; + +namespace GitHub.Services.WebApi +{ + /// + /// This attribute provides the location service area identifier in order to target the location service + /// instance which has the service definitions for the HTTP resources in the specified service area. + /// + [AttributeUsage(AttributeTargets.Class | AttributeTargets.Interface, AllowMultiple = false)] + public sealed class ResourceAreaAttribute : Attribute + { + public ResourceAreaAttribute(String areaId) + { + this.AreaId = new Guid(areaId); + } + + public readonly Guid AreaId; + } + + /// + /// Use in conjunction with JsonCompatConverter. This attribute describes a model property or field change at a particular API version. + /// + [AttributeUsage(AttributeTargets.Property | AttributeTargets.Field, AllowMultiple = true)] + public sealed class CompatPropertyAttribute : Attribute + { + /// + /// This attribute describes a model property or field change at a particular API version. + /// + /// Old name of the serialized property. + /// The major version component of the max version of the api to support the old property name. + /// The minor version component of the max version of the api to support the old property name. + public CompatPropertyAttribute(String oldName, Int32 majorApiVersion, Int32 minorApiVersion = 0) + { + OldName = oldName; + MaxApiVersion = new Version(majorApiVersion, minorApiVersion); + } + + /// + /// Old name of the serialized property. + /// + public String OldName { get; private set; } + + /// + /// The max version of the api to support the old property name. + /// + public Version MaxApiVersion { get; private set; } + } + + /// + /// This tells the client generator to set this property to the content of the repsonse + /// + [AttributeUsage(AttributeTargets.Property, AllowMultiple = false)] + public sealed class ClientResponseContentAttribute : Attribute + { + public ClientResponseContentAttribute() + { + } + } + + /// + /// This tells the client generator to set this property to the header value from the response. This should only be added to types of IEnumerable<String> + /// + [AttributeUsage(AttributeTargets.Property, AllowMultiple = false)] + public sealed class ClientResponseHeaderAttribute : Attribute + { + public ClientResponseHeaderAttribute(string headerName) + { + HeaderName = headerName; + } + + public string HeaderName { get; private set; } + } + + /// + /// Tells the client generator to create meta data for this model, even if it is not referenced directly or indirectly from the client. 
+ /// + [AttributeUsage(AttributeTargets.Class | AttributeTargets.Enum | AttributeTargets.Struct, AllowMultiple = false)] + public sealed class ClientIncludeModelAttribute : Attribute + { + public ClientIncludeModelAttribute() + { + Languages = RestClientLanguages.All; + } + + public ClientIncludeModelAttribute(RestClientLanguages languages) + { + Languages = languages; + } + + public RestClientLanguages Languages { get; } + } + + /// + /// Marks a class, method or property for internal use only. This attribute ensures the item + /// does not show up in public documentation, adds EditorBrowsableState.Never in C# clients + /// to hide the item, and optionaly adds @internal in TypeScript clients which removes the + /// item from the TypeScript declare (d.ts) file. This does not exempt this API from the + /// formal REST Api review process. Our internal APIs must meet the same standards and + /// guidelines as our public APIs. + /// + [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Property | AttributeTargets.Field, AllowMultiple = false)] + public sealed class ClientInternalUseOnlyAttribute : Attribute + { + /// + /// Marks a class, method or property for internal use only. This attribute ensures the item + /// does not show up in public documentation, adds EditorBrowsableState.Never in C# clients + /// to hide the item, and optionaly adds @internal in TypeScript clients which removes the + /// item from the TypeScript declare (d.ts) file. This does not exempt this API from the + /// formal REST Api review process. Our internal APIs must meet the same standards and + /// guidelines as our public APIs. + /// + /// Default is true. Set to false if you need the item to appear in the TypeScript declare (d.ts) file for use by extensions. + public ClientInternalUseOnlyAttribute(bool omitFromTypeScriptDeclareFile = true) + { + OmitFromTypeScriptDeclareFile = omitFromTypeScriptDeclareFile; + } + + /// + /// Set to false if you need the item to appear in the TypeScript declare (d.ts) file for use by extensions. + /// + public bool OmitFromTypeScriptDeclareFile { get; set; } + } + + [AttributeUsage(AttributeTargets.Class, AllowMultiple = false)] + public sealed class ClientCircuitBreakerSettingsAttribute : Attribute + { + public ClientCircuitBreakerSettingsAttribute(int timeoutSeconds, int failurePercentage) + { + Timeout = TimeSpan.FromSeconds(timeoutSeconds); + ErrorPercentage = failurePercentage; + } + + /// + /// Timeout in seconds + /// + public TimeSpan Timeout { get; private set; } + + /// + /// Percentage of failed commands + /// + public int ErrorPercentage { get; private set; } + + /// + /// Number of max concurrent requests + /// + public int MaxConcurrentRequests { get; set; } + } + + [AttributeUsage(AttributeTargets.Class | AttributeTargets.Property, AllowMultiple = false)] + public sealed class ClientCancellationTimeoutAttribute : Attribute + { + public ClientCancellationTimeoutAttribute(int timeoutSeconds) + { + Timeout = TimeSpan.FromSeconds(timeoutSeconds); + } + + /// + /// Timeout in seconds for request cancellation + /// + public TimeSpan Timeout { get; private set; } + } + + /// + /// Indicates which headers are considered to contain sensitive information by a particular HttpClient. 
+ /// + [AttributeUsage(AttributeTargets.Class)] + public sealed class ClientSensitiveHeaderAttribute : Attribute + { + public string HeaderName { get; set; } + + public ClientSensitiveHeaderAttribute(string headerName) + { + HeaderName = headerName; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Constants.cs b/src/Sdk/WebApi/WebApi/Constants.cs new file mode 100644 index 00000000000..a94c7623055 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Constants.cs @@ -0,0 +1,101 @@ +using GitHub.Services.Common; +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.Services.WebApi +{ + // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + // This will not be like MS.TF.Framework.Common! + // If your service does not ship in SPS or the Framework SDK you cannot put your stuff here! + // It goes in your own assembly! + // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + [GenerateAllConstants] + public static class ServiceInstanceTypes + { + // !!!!!!!!!!!!!!!!!! + // This class is sealed to new guids -- please define your instance type constant in your own assembly + // !!!!!!!!!!!!!!!!!! + + public const String MPSString = "00000000-0000-8888-8000-000000000000"; + public static readonly Guid MPS = new Guid(MPSString); + + public const String SPSString = "951917AC-A960-4999-8464-E3F0AA25B381"; + public static readonly Guid SPS = new Guid(SPSString); + + public const String TFSString = "00025394-6065-48CA-87D9-7F5672854EF7"; + public static readonly Guid TFS = new Guid(TFSString); + + public const String TFSOnPremisesString = "87966EAA-CB2A-443F-BE3C-47BD3B5BF3CB"; + public static readonly Guid TFSOnPremises = new Guid(TFSOnPremisesString); + + [Obsolete] + public const String SpsExtensionString = "00000024-0000-8888-8000-000000000000"; + [Obsolete] + public static readonly Guid SpsExtension = new Guid(SpsExtensionString); + + public const String SDKSampleString = "FFFFFFFF-0000-8888-8000-000000000000"; + public static readonly Guid SDKSample = new Guid(SDKSampleString); + + // !!!!!!!!!!!!!!!!!! + // This class is sealed to new guids -- please define your instance type constant in your own assembly + // !!!!!!!!!!!!!!!!!! + } + + /// + /// Enumeration of the options that can be passed in on Connect. + /// + [DataContract] + [Flags] + public enum ConnectOptions + { + /// + /// Retrieve no optional data. + /// + [EnumMember] + None = 0, + + /// + /// Includes information about AccessMappings and ServiceDefinitions. + /// + [EnumMember] + IncludeServices = 1, + + /// + /// Includes the last user access for this host. + /// + [EnumMember] + IncludeLastUserAccess = 2, + + /// + /// This is only valid on the deployment host and when true. Will only return + /// inherited definitions. + /// + [EnumMember] + [EditorBrowsable(EditorBrowsableState.Never)] + IncludeInheritedDefinitionsOnly = 4, + + /// + /// When true will only return non inherited definitions. + /// Only valid at non-deployment host. 
+ /// + [EnumMember] + [EditorBrowsable(EditorBrowsableState.Never)] + IncludeNonInheritedDefinitionsOnly = 8, + } + + [DataContract] + [Flags] + public enum DeploymentFlags + { + [EnumMember] + None = 0x0, + + [EnumMember] + Hosted = 0x1, + + [EnumMember] + OnPremises = 0x2 + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Common/SocialDescriptor.cs b/src/Sdk/WebApi/WebApi/Contracts/Common/SocialDescriptor.cs new file mode 100644 index 00000000000..ea7daa83d73 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Common/SocialDescriptor.cs @@ -0,0 +1,345 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.Graph; +using System.Xml.Serialization; +using System.Xml; +using System.Xml.Schema; +using GitHub.Services.WebApi; +using System.Linq; +using System.Reflection; +using System.ComponentModel; +using System.Globalization; + +namespace GitHub.Services.Common +{ + [TypeConverter(typeof(SocialDescriptorConverter))] + public struct SocialDescriptor : IEquatable, IXmlSerializable + { + public SocialDescriptor(string socialType, string identifier) + { + ValidateSocialType(socialType); + ValidateIdentifier(identifier); + + SocialType = NormalizeSocialType(socialType); + Identifier = identifier; + } + + [DataMember] + public string SocialType { get; private set; } + + [DataMember] + public string Identifier { get; private set; } + + public override string ToString() + { + if (this == default(SocialDescriptor)) + { + return null; + } + + return string.Concat( + Constants.SocialDescriptorPrefix, + SocialType, + Constants.SocialDescriptorPartsSeparator, + PrimitiveExtensions.ToBase64StringNoPaddingFromString(Identifier)); + } + + public static SocialDescriptor FromString(string socialDescriptorString) + { + if (string.IsNullOrEmpty(socialDescriptorString)) + { + return default(SocialDescriptor); + } + + if (!socialDescriptorString.StartsWith(Constants.SocialDescriptorPrefix)) + { + return new SocialDescriptor(Constants.SocialType.Unknown, socialDescriptorString); + } + + if (socialDescriptorString.Length < Constants.SocialDescriptorPolicies.MinSocialDescriptorStringLength) + { + return new SocialDescriptor(Constants.SocialType.Unknown, socialDescriptorString); + } + + var tokens = socialDescriptorString.Split(new char[] { Constants.SocialDescriptorPartsSeparator }, 3); + if (tokens.Length != 2) + { + return new SocialDescriptor(Constants.SocialType.Unknown, socialDescriptorString); + } + + string moniker = tokens[0].Substring(1); + string identifier = tokens[1]; + + try + { + return new SocialDescriptor(moniker, PrimitiveExtensions.FromBase64StringNoPaddingToString(identifier)); + } + catch { } + + return new SocialDescriptor(Constants.SocialType.Unknown, socialDescriptorString); + } + + /// + /// Parses a string of comma separated social descriptors into a enumerable list of objects. 
+ /// + /// empty enumerable if parameter 'descriptors' is null or empty + public static IEnumerable FromCommaSeperatedStrings(string descriptors) + { + if (string.IsNullOrEmpty(descriptors)) + { + return Enumerable.Empty(); + } + + return descriptors.Split(Constants.SocialListSeparator).Where(descriptor => !string.IsNullOrEmpty(descriptor)).Select(descriptor => FromString(descriptor)); + } + + #region Equality and Compare + + #region Implement IEquatable to avoid boxing + public bool Equals(SocialDescriptor socialDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(SocialType, socialDescriptor.SocialType) && + StringComparer.Ordinal.Equals(Identifier, socialDescriptor.Identifier); // The Social Identifier can be case sensitive, hence avoiding the case ignore check + } + #endregion + + public override bool Equals(object obj) + { + return obj is SocialDescriptor && this == (SocialDescriptor)obj; + } + + public override int GetHashCode() + { + if (this == default(SocialDescriptor)) + { + return 0; + } + + int hashCode = 7443; // "large" prime to start the seed + + // Bitshifting and subtracting once is an efficient way to multiply by our second "large" prime, 0x7ffff = 524287 + hashCode = (hashCode << 19) - hashCode + StringComparer.OrdinalIgnoreCase.GetHashCode(SocialType); + hashCode = (hashCode << 19) - hashCode + StringComparer.Ordinal.GetHashCode(Identifier); + + return hashCode; + } + + public static bool operator ==(SocialDescriptor left, SocialDescriptor right) + { + return left.Equals(right); + } + + public static bool operator !=(SocialDescriptor left, SocialDescriptor right) + { + return !left.Equals(right); + } + + public static implicit operator string(SocialDescriptor socialDescriptor) + { + return socialDescriptor.ToString(); + } + + internal static int Compare(SocialDescriptor left, SocialDescriptor right) + { + int retValue = StringComparer.OrdinalIgnoreCase.Compare(left.SocialType, right.SocialType); + + if (0 == retValue) + { + retValue = StringComparer.Ordinal.Compare(left.Identifier, right.Identifier); + } + + return retValue; + } + + private static string NormalizeSocialType(String socialType) + { + // Look up the string in the static dictionary. If we get a hit, then + // we'll use that string for the social type instead. 
This saves memory + // as well as improves compare/equals performance when comparing descriptors, + // since Object.ReferenceEquals will return true a lot more often + if (!Constants.SocialTypeMap.TryGetValue(socialType, out string normalizedSocialType)) + { + normalizedSocialType = socialType; + } + + return normalizedSocialType; + } + #endregion + + #region Validation + //Copied from TFCommonUtil.cs + private static void ValidateSocialType(string socialType) + { + if (string.IsNullOrEmpty(socialType)) + { + throw new ArgumentNullException(nameof(socialType)); + } + + if (socialType.Length < Constants.SocialDescriptorPolicies.MinSocialTypeLength || socialType.Length > Constants.SocialDescriptorPolicies.MaxSocialTypeLength) + { + throw new ArgumentOutOfRangeException(nameof(socialType), socialType, GraphResources.SubjectTypeLengthOutOfRange()); + } + } + + private static void ValidateIdentifier(string identifier) + { + if (string.IsNullOrEmpty(identifier)) + { + throw new ArgumentNullException(nameof(identifier)); + } + } + + #endregion + + #region XML Serialization + XmlSchema IXmlSerializable.GetSchema() { return null; } + + void IXmlSerializable.ReadXml(XmlReader reader) + { + ArgumentUtility.CheckForNull(reader, nameof(reader)); + + var isEmptyElement = reader.IsEmptyElement; + + reader.ReadStartElement(); + + if (isEmptyElement) + { + return; + } + + if (reader.NodeType == XmlNodeType.Text) + { + var sourceDescriptor = FromString(reader.ReadContentAsString()); + SocialType = sourceDescriptor.SocialType; + Identifier = sourceDescriptor.Identifier; + } + else + { + while (reader.IsStartElement()) + { + switch (reader.Name) + { + case nameof(SocialType): + var socialType = reader.ReadElementContentAsString(); + ValidateSocialType(socialType); + SocialType = socialType; + break; + case nameof(Identifier): + var identifier = reader.ReadElementContentAsString(); + ValidateIdentifier(identifier); + Identifier = identifier; + break; + default: + reader.ReadOuterXml(); + break; + } + } + } + + reader.ReadEndElement(); + } + + void IXmlSerializable.WriteXml(XmlWriter writer) + { + ArgumentUtility.CheckForNull(writer, nameof(writer)); + + if (Equals(default(SocialDescriptor))) + { + return; + } + + writer.WriteElementString(nameof(SocialType), SocialType); + writer.WriteElementString(nameof(Identifier), Identifier); + } + #endregion + } + + public class SocialDescriptorComparer : IComparer, IEqualityComparer + { + private SocialDescriptorComparer() { } + + public int Compare(SocialDescriptor left, SocialDescriptor right) + { + return SocialDescriptor.Compare(left, right); + } + + public bool Equals(SocialDescriptor left, SocialDescriptor right) + { + return left == right; + } + + public int GetHashCode(SocialDescriptor socialDescriptor) + { + return socialDescriptor.GetHashCode(); + } + + public static SocialDescriptorComparer Instance { get; } = new SocialDescriptorComparer(); + } + + public static class SocialDescriptorExtensions + { + public static bool IsGitHubSocialType(this SocialDescriptor socialDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(socialDescriptor.SocialType ?? String.Empty, Constants.SocialType.GitHub); + } + + public static bool IsSocialType(this SubjectDescriptor subjectDescriptor) + { + return subjectDescriptor.ToString().StartsWith(Constants.SocialDescriptorPrefix); + } + } + + /// + /// Converter to support data contract serialization. 
+ /// + /// + /// This class should only be used to convert a descriptor string from the client back into a string + /// tuple SocialDescriptor type on the server. The client should be unaware that this tuple relationship exists + /// and this should not permit that relationship to leak to the client. + /// + /// Specifically, this is provided so that the MVC router can convert a string => SocialDescriptor so + /// that we can use the [ClientParameterType(typeof(string))] SocialDescriptor socialDescriptor) convenience in each + /// controller method. + /// + public class SocialDescriptorConverter : TypeConverter + { + public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType) + { + return sourceType == typeof(string) || base.CanConvertFrom(context, sourceType); + } + + public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType) + { + return destinationType == typeof(string) || base.CanConvertTo(context, destinationType); + } + + public override object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, object value) + { + if (value is string) + { + return SocialDescriptor.FromString((string)value); + } + + return base.ConvertFrom(context, culture, value); + } + + public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType) + { + if (destinationType == typeof(string) && value is SocialDescriptor) + { + SocialDescriptor socialDescriptor = (SocialDescriptor)value; + if (socialDescriptor == default(SocialDescriptor)) + { + // socialDescriptor.ToString() returns null in the case of default(SocialDescriptor) + // and null can not be deserialized when the object is a struct. + return string.Empty; + } + + return socialDescriptor.ToString(); + } + + return base.ConvertTo(context, culture, value, destinationType); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Common/SubjectDescriptor.cs b/src/Sdk/WebApi/WebApi/Contracts/Common/SubjectDescriptor.cs new file mode 100644 index 00000000000..b931f222aab --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Common/SubjectDescriptor.cs @@ -0,0 +1,519 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.Graph; +using System.Xml.Serialization; +using System.Xml; +using System.Xml.Schema; +using GitHub.Services.WebApi; +using System.Linq; +using System.Reflection; +using System.ComponentModel; +using System.Globalization; + +namespace GitHub.Services.Common +{ + [TypeConverter(typeof(SubjectDescriptorConverter))] + public struct SubjectDescriptor : IEquatable, IXmlSerializable + { + public SubjectDescriptor(string subjectType, string identifier) + { + ValidateSubjectType(subjectType); + ValidateIdentifier(identifier); + + SubjectType = NormalizeSubjectType(subjectType); + Identifier = identifier; + } + + [DataMember] + public string SubjectType { get; private set; } + + [DataMember] + public string Identifier { get; private set; } + + public override string ToString() + { + if (this == default(SubjectDescriptor)) + { + return null; + } + + return string.Concat( + SubjectType, + Constants.SubjectDescriptorPartsSeparator, + PrimitiveExtensions.ToBase64StringNoPaddingFromString(Identifier)); + } + + public static SubjectDescriptor FromString(string subjectDescriptorString) + { + if (string.IsNullOrEmpty(subjectDescriptorString)) + { + return default(SubjectDescriptor); + } + + if (subjectDescriptorString.Length < 
Constants.SubjectDescriptorPolicies.MinSubjectDescriptorStringLength) + { + return new SubjectDescriptor(Constants.SubjectType.Unknown, subjectDescriptorString); + } + + int splitIndex = subjectDescriptorString.IndexOf(Constants.SubjectDescriptorPartsSeparator, Constants.SubjectDescriptorPolicies.MinSubjectTypeLength, 3); + + // Either the separator is not there, or it's before the MinSubjectTypeLength or it's at the end the string; either way it's wrong. + if (splitIndex < 3 || splitIndex == subjectDescriptorString.Length - 1) + { + return new SubjectDescriptor(Constants.SubjectType.Unknown, subjectDescriptorString); + } + + string moniker = subjectDescriptorString.Substring(0, splitIndex); + string identifier = subjectDescriptorString.Substring(splitIndex + 1); + + try + { + return new SubjectDescriptor(moniker, PrimitiveExtensions.FromBase64StringNoPaddingToString(identifier)); + } + catch { } + + return new SubjectDescriptor(Constants.SubjectType.Unknown, subjectDescriptorString); + } + + /// + /// Parses a string of comma separated subject descriptors into a enumerable list of objects. + /// + /// empty enumerable if parameter 'descriptors' is null or empty + public static IEnumerable FromCommaSeperatedStrings(string descriptors) + { + if (string.IsNullOrEmpty(descriptors)) + { + return Enumerable.Empty(); + } + + return descriptors.Split(',').Where(descriptor => !string.IsNullOrEmpty(descriptor)).Select(descriptor => FromString(descriptor)); + } + + #region Equality and Compare + + #region Implement IEquatable to avoid boxing + public bool Equals(SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(SubjectType, subjectDescriptor.SubjectType) && + StringComparer.OrdinalIgnoreCase.Equals(Identifier, subjectDescriptor.Identifier); + } + #endregion + + public override bool Equals(object obj) + { + return obj is SubjectDescriptor && this == (SubjectDescriptor)obj; + } + + public override int GetHashCode() + { + if (this == default(SubjectDescriptor)) + { + return 0; + } + + int hashCode = 7443; // "large" prime to start the seed + + // Bitshifting and subtracting once is an efficient way to multiply by our second "large" prime, 0x7ffff = 524287 + hashCode = (hashCode << 19) - hashCode + StringComparer.OrdinalIgnoreCase.GetHashCode(SubjectType); + hashCode = (hashCode << 19) - hashCode + StringComparer.OrdinalIgnoreCase.GetHashCode(Identifier); + + return hashCode; + } + + public static bool operator ==(SubjectDescriptor left, SubjectDescriptor right) + { + return left.Equals(right); + } + + public static bool operator !=(SubjectDescriptor left, SubjectDescriptor right) + { + return !left.Equals(right); + } + + public static implicit operator string(SubjectDescriptor subjectDescriptor) + { + return subjectDescriptor.ToString(); + } + + internal static int Compare(SubjectDescriptor left, SubjectDescriptor right) + { + int retValue = StringComparer.OrdinalIgnoreCase.Compare(left.SubjectType, right.SubjectType); + + if (0 == retValue) + { + retValue = StringComparer.OrdinalIgnoreCase.Compare(left.Identifier, right.Identifier); + } + + return retValue; + } + + private static string NormalizeSubjectType(String subjectType) + { + // Look up the string in the static dictionary. If we get a hit, then + // we'll use that string for the subject type instead. 
This saves memory + // as well as improves compare/equals performance when comparing descriptors, + // since Object.ReferenceEquals will return true a lot more often + if (!Constants.SubjectTypeMap.TryGetValue(subjectType, out string normalizedSubjectType)) + { + normalizedSubjectType = subjectType; + } + + return normalizedSubjectType; + } + #endregion + + #region Validation + //Copied from TFCommonUtil.cs + private static void ValidateSubjectType(string subjectType) + { + if (string.IsNullOrEmpty(subjectType)) + { + throw new ArgumentNullException(nameof(subjectType)); + } + + if (subjectType.Length < Constants.SubjectDescriptorPolicies.MinSubjectTypeLength || subjectType.Length > Constants.SubjectDescriptorPolicies.MaxSubjectTypeLength) + { + throw new ArgumentOutOfRangeException(nameof(subjectType), subjectType, GraphResources.SubjectTypeLengthOutOfRange()); + } + } + + private static void ValidateIdentifier(string identifier) + { + if (string.IsNullOrEmpty(identifier)) + { + throw new ArgumentNullException(nameof(identifier)); + } + + if (identifier.Length > Constants.SubjectDescriptorPolicies.MaxIdentifierLength) + { + throw new ArgumentOutOfRangeException(nameof(identifier), identifier, GraphResources.IdentifierLengthOutOfRange()); + } + } + + #endregion + + #region XML Serialization + XmlSchema IXmlSerializable.GetSchema() { return null; } + + void IXmlSerializable.ReadXml(XmlReader reader) + { + ArgumentUtility.CheckForNull(reader, nameof(reader)); + + var isEmptyElement = reader.IsEmptyElement; + + reader.ReadStartElement(); + + if (isEmptyElement) + { + return; + } + + if (reader.NodeType == XmlNodeType.Text) + { + var sourceDescriptor = FromString(reader.ReadContentAsString()); + SubjectType = sourceDescriptor.SubjectType; + Identifier = sourceDescriptor.Identifier; + } + else + { + while (reader.IsStartElement()) + { + switch (reader.Name) + { + case nameof(SubjectType): + var subjectType = reader.ReadElementContentAsString(); + ValidateSubjectType(subjectType); + SubjectType = subjectType; + break; + case nameof(Identifier): + var identifier = reader.ReadElementContentAsString(); + ValidateIdentifier(identifier); + Identifier = identifier; + break; + default: + reader.ReadOuterXml(); + break; + } + } + } + + reader.ReadEndElement(); + } + + void IXmlSerializable.WriteXml(XmlWriter writer) + { + ArgumentUtility.CheckForNull(writer, nameof(writer)); + + if (Equals(default(SubjectDescriptor))) + { + return; + } + + writer.WriteElementString(nameof(SubjectType), SubjectType); + writer.WriteElementString(nameof(Identifier), Identifier); + } + #endregion + } + + public class SubjectDescriptorComparer : IComparer, IEqualityComparer + { + private SubjectDescriptorComparer() { } + + public int Compare(SubjectDescriptor left, SubjectDescriptor right) + { + return SubjectDescriptor.Compare(left, right); + } + + public bool Equals(SubjectDescriptor left, SubjectDescriptor right) + { + return left == right; + } + + public int GetHashCode(SubjectDescriptor subjectDescriptor) + { + return subjectDescriptor.GetHashCode(); + } + + public static SubjectDescriptorComparer Instance { get; } = new SubjectDescriptorComparer(); + } + + // Keep this in sync with the IdentityDescriptorExtensions to avoid extra casting/conversions + public static class SubjectDescriptorExtensions + { + internal static Guid GetMasterScopeId(this SubjectDescriptor subjectDescriptor) + { + if (!subjectDescriptor.IsGroupScopeType()) + { + throw new InvalidSubjectTypeException(subjectDescriptor.SubjectType); + } + + if 
(!Guid.TryParse(subjectDescriptor.Identifier, out Guid masterScopeId)) + { + throw new ArgumentException($"Parameter {nameof(subjectDescriptor)} does not have a valid master scope ID"); + } + + return masterScopeId; + } + + internal static Guid GetCuid(this SubjectDescriptor subjectDescriptor) + { + if (!subjectDescriptor.IsCuidBased()) + { + throw new InvalidSubjectTypeException(subjectDescriptor.SubjectType); + } + + if (!Guid.TryParse(subjectDescriptor.Identifier, out Guid cuid)) + { + throw new ArgumentException($"Parameter {nameof(subjectDescriptor)} does not have a valid CUID"); + } + + return cuid; + } + + public static bool IsWindowsType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.WindowsIdentity); + } + + public static bool IsGroupType(this SubjectDescriptor subjectDescriptor) + { + return subjectDescriptor.IsAadGroupType() || subjectDescriptor.IsVstsGroupType(); + } + + public static bool IsAadGroupType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.AadGroup); + } + + public static bool IsVstsGroupType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.VstsGroup); + } + + public static bool IsClaimsUserType(this SubjectDescriptor subjectDescriptor) + { + return subjectDescriptor.IsAadUserType() || subjectDescriptor.IsMsaUserType(); + } + + public static bool IsAadUserType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.AadUser); + } + + public static bool IsMsaUserType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.MsaUser); + } + + public static bool IsBindPendingUserType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.BindPendingUser); + } + + public static bool IsUnauthenticatedIdentityType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.UnauthenticatedIdentity); + } + + public static bool IsServiceIdentityType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.ServiceIdentity); + } + + public static bool IsAggregateIdentityType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.AggregateIdentity); + } + + public static bool IsImportedIdentityType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.ImportedIdentity); + } + + public static bool IsGroupScopeType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.GroupScopeType); + } + + public static bool IsServerTestIdentityType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.ServerTestIdentity); + } + + // ******* All types below this line are not backed by 
the graph or identity service ************************ + public static bool IsSystemServicePrincipalType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.SystemServicePrincipal); + } + + public static bool IsSystemScopeType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.SystemScope); + } + + public static bool IsSystemCspPartnerType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.SystemCspPartner); + } + + public static bool IsSystemLicenseType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.SystemLicense); + } + + public static bool IsSystemPublicAccessType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.SystemPublicAccess); + } + + public static bool IsSystemAccessControlType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.SystemAccessControl); + } + + public static bool IsSystemType(this SubjectDescriptor subjectDescriptor) + { + return subjectDescriptor.IsSystemServicePrincipalType() || + subjectDescriptor.IsSystemScopeType() || + subjectDescriptor.IsSystemLicenseType() || + subjectDescriptor.IsSystemCspPartnerType() || + subjectDescriptor.IsSystemPublicAccessType() || + subjectDescriptor.IsSystemAccessControlType(); + } + + public static bool IsSubjectStoreType(this SubjectDescriptor subjectDescriptor) + { + return subjectDescriptor.IsSystemServicePrincipalType() || + subjectDescriptor.IsSystemScopeType() || + subjectDescriptor.IsSystemLicenseType() || + subjectDescriptor.IsSystemCspPartnerType(); + } + + public static bool IsCspPartnerIdentityType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.CspPartnerIdentity); + } + + public static bool IsUnknownSubjectType(this SubjectDescriptor subjectDescriptor) + { + return StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.Unknown) || + StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.UnknownGroup) || + StringComparer.OrdinalIgnoreCase.Equals(subjectDescriptor.SubjectType, Constants.SubjectType.UnknownUser); + } + + public static bool IsCuidBased(this SubjectDescriptor subjectDescriptor) + { + return subjectDescriptor.IsClaimsUserType() || subjectDescriptor.IsCspPartnerIdentityType(); + } + + public static bool IsUserType(this SubjectDescriptor subjectDescriptor) + { + return subjectDescriptor.IsClaimsUserType() || + subjectDescriptor.IsCspPartnerIdentityType() || + subjectDescriptor.IsBindPendingUserType() || + subjectDescriptor.IsServiceIdentityType(); + } + + public static bool IsPubliclyAvailableGraphSubjectType(this SubjectDescriptor subjectDescriptor) + { + return (subjectDescriptor == default(SubjectDescriptor)) || + subjectDescriptor.IsUserType() || + subjectDescriptor.IsGroupType() || + subjectDescriptor.IsGroupScopeType(); + } + } + + /// + /// Converter to support data contract serialization. 
+ /// + /// + /// This class should only be used to convert a descriptor string from the client back into a string + /// tuple SubjectDescriptor type on the server. The client should be unaware that this tuple relationship exists + /// and this should not permit that relationship to leak to the client. + /// + /// Specifically, this is provided so that the MVC router can convert a string => SubjectDescriptor so + /// that we can use the [ClientParameterType(typeof(string))] SubjectDescriptor userDescriptor) convenience in each + /// controller method. + /// + public class SubjectDescriptorConverter : TypeConverter + { + public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType) + { + return sourceType == typeof(string) || base.CanConvertFrom(context, sourceType); + } + + public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType) + { + return destinationType == typeof(string) || base.CanConvertTo(context, destinationType); + } + + public override object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, object value) + { + if (value is string) + { + return SubjectDescriptor.FromString((string)value); + } + + return base.ConvertFrom(context, culture, value); + } + + public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType) + { + if (destinationType == typeof(string) && value is SubjectDescriptor) + { + SubjectDescriptor subjectDescriptor = (SubjectDescriptor)value; + if (subjectDescriptor == default(SubjectDescriptor)) + { + // subjectDescriptor.ToString() returns null in the case of default(SubjectDescriptor) + // and null can not be deserialized when the object is a struct. + return string.Empty; + } + + return subjectDescriptor.ToString(); + } + + return base.ConvertTo(context, culture, value, destinationType); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/AccessTokenResult.cs b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/AccessTokenResult.cs new file mode 100644 index 00000000000..8e119b60e1c --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/AccessTokenResult.cs @@ -0,0 +1,32 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Jwt; + +namespace GitHub.Services.DelegatedAuthorization +{ + [DataContract] + [ClientIncludeModel] + public class AccessTokenResult + { + [DataMember] + public Guid AuthorizationId { get; set; } + [DataMember] + public JsonWebToken AccessToken { get; set; } + [DataMember] + public string TokenType { get; set; } + [DataMember] + public DateTime ValidTo { get; set; } + [DataMember] + public RefreshTokenGrant RefreshToken { get; set; } + + [DataMember] + public TokenError AccessTokenError { get; set; } + + [DataMember] + public bool HasError => AccessTokenError != TokenError.None; + + [DataMember] + public string ErrorDescription { get; set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/AuthorizationGrant.cs b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/AuthorizationGrant.cs new file mode 100644 index 00000000000..e350e9064da --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/AuthorizationGrant.cs @@ -0,0 +1,26 @@ +using Newtonsoft.Json; +using Newtonsoft.Json.Converters; +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.DelegatedAuthorization +{ + [KnownType(typeof(RefreshTokenGrant))] + 
[KnownType(typeof(JwtBearerAuthorizationGrant))] + [JsonConverter(typeof(AuthorizationGrantJsonConverter))] + public abstract class AuthorizationGrant + { + public AuthorizationGrant(GrantType grantType) + { + if (grantType == GrantType.None) + { + throw new ArgumentException("Grant type is required."); + } + + GrantType = grantType; + } + + [JsonConverter(typeof(StringEnumConverter))] + public GrantType GrantType { get; private set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/AuthorizationGrantJsonConverter.cs b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/AuthorizationGrantJsonConverter.cs new file mode 100644 index 00000000000..967c27f2371 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/AuthorizationGrantJsonConverter.cs @@ -0,0 +1,50 @@ +using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Jwt; +using Newtonsoft.Json.Linq; +using System; + +namespace GitHub.Services.DelegatedAuthorization +{ + public class AuthorizationGrantJsonConverter : VssJsonCreationConverter + { + protected override AuthorizationGrant Create(Type objectType, JObject jsonObject) + { + var typeValue = jsonObject.GetValue(nameof(AuthorizationGrant.GrantType), StringComparison.OrdinalIgnoreCase); + if (typeValue == null) + { + throw new ArgumentException(WebApiResources.UnknownEntityType(typeValue)); + } + + GrantType grantType; + if (typeValue.Type == JTokenType.Integer) + { + grantType = (GrantType)(Int32)typeValue; + } + else if (typeValue.Type != JTokenType.String || !Enum.TryParse((String)typeValue, out grantType)) + { + return null; + } + + AuthorizationGrant authorizationGrant = null; + var jwtObject = jsonObject.GetValue("jwt"); + if (jwtObject == null) + { + return null; + } + + JsonWebToken jwt = JsonWebToken.Create(jwtObject.ToString()); + switch (grantType) + { + case GrantType.JwtBearer: + authorizationGrant = new JwtBearerAuthorizationGrant(jwt); + break; + + case GrantType.RefreshToken: + authorizationGrant = new RefreshTokenGrant(jwt); + break; + } + + return authorizationGrant; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/GrantType.cs b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/GrantType.cs new file mode 100644 index 00000000000..67634f6606f --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/GrantType.cs @@ -0,0 +1,11 @@ +namespace GitHub.Services.DelegatedAuthorization +{ + public enum GrantType + { + None = 0, + JwtBearer = 1, + RefreshToken = 2, + Implicit = 3, + ClientCredentials = 4, + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/JwtBearerAuthorizationGrant.cs b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/JwtBearerAuthorizationGrant.cs new file mode 100644 index 00000000000..ccfc3c243ec --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/JwtBearerAuthorizationGrant.cs @@ -0,0 +1,21 @@ +using GitHub.Services.WebApi.Jwt; +using System.Runtime.Serialization; + +namespace GitHub.Services.DelegatedAuthorization +{ + public class JwtBearerAuthorizationGrant : AuthorizationGrant + { + public JwtBearerAuthorizationGrant(JsonWebToken jwt) + : base(GrantType.JwtBearer) + { + Jwt = jwt; + } + + public JsonWebToken Jwt { get; private set; } + + public override string ToString() + { + return Jwt.EncodedToken; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/RefreshTokenGrant.cs b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/RefreshTokenGrant.cs new file mode 
100644 index 00000000000..fb434ba27c0 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/RefreshTokenGrant.cs @@ -0,0 +1,20 @@ +using GitHub.Services.WebApi.Jwt; + +namespace GitHub.Services.DelegatedAuthorization +{ + public class RefreshTokenGrant : AuthorizationGrant + { + public RefreshTokenGrant(JsonWebToken jwt) + : base(GrantType.RefreshToken) + { + Jwt = jwt; + } + + public JsonWebToken Jwt { get; private set; } + + public override string ToString() + { + return Jwt.EncodedToken; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/TokenError.cs b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/TokenError.cs new file mode 100644 index 00000000000..4f44bfd03ef --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/DelegatedAuthorization/TokenError.cs @@ -0,0 +1,39 @@ +namespace GitHub.Services.DelegatedAuthorization +{ + public enum TokenError + { + None, + GrantTypeRequired, + AuthorizationGrantRequired, + ClientSecretRequired, + RedirectUriRequired, + InvalidAuthorizationGrant, + InvalidAuthorizationScopes, + InvalidRefreshToken, + AuthorizationNotFound, + AuthorizationGrantExpired, + AccessAlreadyIssued, + InvalidRedirectUri, + AccessTokenNotFound, + InvalidAccessToken, + AccessTokenAlreadyRefreshed, + InvalidClientSecret, + ClientSecretExpired, + ServerError, + AccessDenied, + AccessTokenKeyRequired, + InvalidAccessTokenKey, + FailedToGetAccessToken, + InvalidClientId, + InvalidClient, + InvalidValidTo, + InvalidUserId, + FailedToIssueAccessToken, + AuthorizationGrantScopeMissing, + InvalidPublicAccessTokenKey, + InvalidPublicAccessToken, + /* Deprecated */ + PublicFeatureFlagNotEnabled, + SSHPolicyDisabled + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/FileContainer/Enumerations.cs b/src/Sdk/WebApi/WebApi/Contracts/FileContainer/Enumerations.cs new file mode 100644 index 00000000000..788ea25a17d --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/FileContainer/Enumerations.cs @@ -0,0 +1,68 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.FileContainer +{ + /// + /// Options a container can have. + /// + [Flags] + [DataContract] + public enum ContainerOptions + { + /// + /// No option. + /// + [EnumMember] + None = 0, + + ///// + ///// Encrypts content of the container. + ///// + //EncryptContent = 1 + } + + /// + /// Type of a container item. + /// + [DataContract] + public enum ContainerItemType + { + /// + /// Any item type. + /// + [EnumMember] + Any = 0, + + /// + /// Item is a folder which can have child items. + /// + [EnumMember] + Folder = 1, + + /// + /// Item is a file which is stored in the file service. + /// + [EnumMember] + File = 2, + } + + /// + /// Status of a container item. + /// + [DataContract] + public enum ContainerItemStatus + { + /// + /// Item is created. + /// + [EnumMember] + Created = 1, + + /// + /// Item is a file pending for upload. + /// + [EnumMember] + PendingUpload = 2 + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/FileContainer/FileContainer.cs b/src/Sdk/WebApi/WebApi/Contracts/FileContainer/FileContainer.cs new file mode 100644 index 00000000000..e00fff59745 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/FileContainer/FileContainer.cs @@ -0,0 +1,118 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.Services.FileContainer +{ + /// + /// Represents a container that encapsulates a hierarchical file system. + /// + [DataContract] + public class FileContainer + { + /// + /// Id. 
+ /// + [DataMember(IsRequired = true)] + public Int64 Id { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Project Id. + /// + [DataMember(IsRequired = false)] + public Guid ScopeIdentifier { get;[EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Uri of the artifact associated with the container. + /// + [DataMember(IsRequired = true)] + public Uri ArtifactUri { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Security token of the artifact associated with the container. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String SecurityToken { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Name. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Name { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Description. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Description { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Total size of the files in bytes. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int64 Size { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Options the container can have. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public ContainerOptions Options { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Identifier of the optional encryption key. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid SigningKeyId { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Owner. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid CreatedBy { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Creation date. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public DateTime DateCreated { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Location of the item resource. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String ItemLocation { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Download Url for the content of this item. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String ContentLocation { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// ItemStore Locator for this container. 
+ /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String LocatorPath { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + public override bool Equals(object obj) + { + FileContainer other = obj as FileContainer; + + if (other == null) + { + return false; + } + + return this.ArtifactUri == other.ArtifactUri && + this.Description == other.Description && + this.Id == other.Id && + this.Name == other.Name && + this.ScopeIdentifier == other.ScopeIdentifier; + } + + public override int GetHashCode() + { + return this.Id.GetHashCode(); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/FileContainer/FileContainerItem.cs b/src/Sdk/WebApi/WebApi/Contracts/FileContainer/FileContainerItem.cs new file mode 100644 index 00000000000..e1c26e6c8d5 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/FileContainer/FileContainerItem.cs @@ -0,0 +1,174 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using System.Text; + +namespace GitHub.Services.FileContainer +{ + /// + /// Represents an item in a container. + /// + [DataContract] + public class FileContainerItem + { + /// + /// Container Id. + /// + [DataMember(IsRequired = true)] + public Int64 ContainerId { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Project Id. + /// + [DataMember(IsRequired = false)] + public Guid ScopeIdentifier { get;[EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Unique path that identifies the item. + /// + [DataMember(IsRequired = true)] + public String Path + { + get + { + return m_path; + } + [EditorBrowsable(EditorBrowsableState.Never)] + set + { + m_path = EnsurePathFormat(value); + } + } + + /// + /// Type of the item: Folder, File or String. + /// + [DataMember(IsRequired = true)] + public ContainerItemType ItemType { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Status of the item: Created or Pending Upload. + /// + [DataMember(IsRequired = true)] + public ContainerItemStatus Status { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Length of the file. Zero if not of a file. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int64 FileLength { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Hash value of the file. Null if not a file. + /// + [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1819:PropertiesShouldNotReturnArrays")] + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Byte[] FileHash { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Encoding of the file. Zero if not a file. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int32 FileEncoding { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Type of the file. Zero if not a file. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int32 FileType { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Creation date. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public DateTime DateCreated { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Last modified date. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public DateTime DateLastModified { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Creator. 
+ /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid CreatedBy { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Modifier. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid LastModifiedBy { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Location of the item resource. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String ItemLocation { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Download Url for the content of this item. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String ContentLocation { get; [EditorBrowsable(EditorBrowsableState.Never)] set; } + + /// + /// Id of the file content. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [EditorBrowsable(EditorBrowsableState.Never)] + public Int32 FileId { get; set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public byte[] ContentId { get; set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Ticket { get; set; } + + public static string EnsurePathFormat(string path) + { + if (string.IsNullOrWhiteSpace(path)) + { + return string.Empty; + } + + // We always make sure that the path is rooted + StringBuilder sb = new StringBuilder(); + String[] components = path.Split(new char[] { '\\', '/' }, StringSplitOptions.RemoveEmptyEntries); + + if (components.Length == 0) + { + return string.Empty; + } + + for (int i = 0; i < components.Length; i++) + { + sb.AppendFormat("{0}{1}", components[i], i == components.Length - 1 ? String.Empty : "/"); + } + + return sb.ToString(); + } + + public override bool Equals(object obj) + { + FileContainerItem other = obj as FileContainerItem; + if (other == null) + { + return false; + } + return this.ContainerId == other.ContainerId && + this.ScopeIdentifier == other.ScopeIdentifier && + this.Path == other.Path && + this.ItemType == other.ItemType; + } + public override int GetHashCode() + { + return Path.GetHashCode(); + } + + private string m_path; + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputDataType.cs b/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputDataType.cs new file mode 100644 index 00000000000..c2afa9212d4 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputDataType.cs @@ -0,0 +1,47 @@ +using System.Runtime.Serialization; + +namespace GitHub.Services.FormInput +{ + /// + /// Enumerates data types that are supported as subscription input values. + /// + [DataContract] + public enum InputDataType + { + /// + /// No data type is specified. + /// + [EnumMember] + None = 0, + + /// + /// Represents a textual value. + /// + [EnumMember] + String = 10, + + /// + /// Represents a numberic value. + /// + [EnumMember] + Number = 20, + + /// + /// Represents a value of true or false. + /// + [EnumMember] + Boolean = 30, + + /// + /// Represents a Guid. + /// + [EnumMember] + Guid = 40, + + /// + /// Represents a URI. 
+ /// + [EnumMember] + Uri = 50 + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputDescriptor.cs b/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputDescriptor.cs new file mode 100644 index 00000000000..a2586816c24 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputDescriptor.cs @@ -0,0 +1,130 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Services.FormInput +{ + /// + /// Describes an input for subscriptions. + /// + [DataContract] + public class InputDescriptor : ISecuredObject + { + /// + /// Identifier for the subscription input + /// + [DataMember] + public String Id { get; set; } + + /// + /// Localized name which can be shown as a label for the subscription input + /// + [DataMember] + public String Name { get; set; } + + /// + /// Description of what this input is used for + /// + [DataMember] + public String Description { get; set; } + + /// + /// Underlying data type for the input value. When this value is specified, + /// InputMode, Validation and Values are optional. + /// + [DataMember] + public string Type { get; set; } + + /// + /// List of scopes supported. Null indicates all scopes are supported. + /// + public List SupportedScopes { get; set; } + + /// + /// Custom properties for the input which can be used by the service provider + /// + [DataMember] + public IDictionary Properties { get; set; } + + /// + /// Mode in which the value of this input should be entered + /// + [DataMember] + public InputMode InputMode { get; set; } + + /// + /// Gets whether this input is confidential, such as for a password or application key + /// + [DataMember] + public Boolean IsConfidential { get; set; } + + /// + /// Gets whether this input is included in the default generated action description. + /// + /// + [DataMember] + public Boolean UseInDefaultDescription { get; set; } + + /// + /// The group localized name to which this input belongs and can be shown as a header + /// for the container that will include all the inputs in the group. + /// + [DataMember] + public String GroupName { get; set; } + + /// + /// A hint for input value. It can be used in the UI as the input placeholder. + /// + [DataMember] + public String ValueHint { get; set; } + + /// + /// Information to use to validate this input's value + /// + [DataMember(EmitDefaultValue = false)] + public InputValidation Validation { get; set; } + + /// + /// Information about possible values for this input + /// + [DataMember(EmitDefaultValue = false)] + public InputValues Values { get; set; } + + /// + /// The ids of all inputs that the value of this input is dependent on. + /// + [DataMember(EmitDefaultValue = false)] + public IList DependencyInputIds { get; set; } + + /// + /// If true, the value information for this input is dynamic and + /// should be fetched when the value of dependency inputs change. 
+ /// + [DataMember(EmitDefaultValue = false)] + public Boolean HasDynamicValueInformation { get; set; } + + public void SetSecuredObjectProperties(Guid namespaceId, Int32 requiredPermissions, String token) + { + this.m_namespaceId = namespaceId; + this.m_requiredPermissions = requiredPermissions; + this.m_token = token; + + this.Validation?.SetSecuredObjectProperties(namespaceId, requiredPermissions, token); + this.Values?.SetSecuredObjectProperties(namespaceId, requiredPermissions, token); + } + + public Guid NamespaceId => m_namespaceId; + + public Int32 RequiredPermissions => m_requiredPermissions; + + public String GetToken() + { + return m_token; + } + + private Guid m_namespaceId; + private Int32 m_requiredPermissions; + private String m_token; + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputMode.cs b/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputMode.cs new file mode 100644 index 00000000000..cd819ea9eee --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputMode.cs @@ -0,0 +1,53 @@ +using System.Runtime.Serialization; + +namespace GitHub.Services.FormInput +{ + /// + /// Mode in which a subscription input should be entered (in a UI) + /// + [DataContract] + public enum InputMode + { + /// + /// This input should not be shown in the UI + /// + [EnumMember] + None = 0, + + /// + /// An input text box should be shown + /// + [EnumMember] + TextBox = 10, + + /// + /// A password input box should be shown + /// + [EnumMember] + PasswordBox = 20, + + /// + /// A select/combo control should be shown + /// + [EnumMember] + Combo = 30, + + /// + /// Radio buttons should be shown + /// + [EnumMember] + RadioButtons = 40, + + /// + /// Checkbox should be shown (for true/false values) + /// + [EnumMember] + CheckBox = 50, + + /// + /// A multi-line text area should be shown + /// + [EnumMember] + TextArea = 60 + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputValidation.cs b/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputValidation.cs new file mode 100644 index 00000000000..c57de3670a9 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputValidation.cs @@ -0,0 +1,81 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Services.FormInput +{ + /// + /// Describes what values are valid for a subscription input + /// + [DataContract] + public class InputValidation : ISecuredObject + { + /// + /// Gets or sets the data type to validate. + /// + [DataMember(EmitDefaultValue = false)] + public InputDataType DataType { get; set; } + + /// + /// Gets or sets if this is a required field. + /// + [DataMember(EmitDefaultValue = false)] + public Boolean IsRequired { get; set; } + + /// + /// Gets or sets the pattern to validate. + /// + [DataMember(EmitDefaultValue = false)] + public String Pattern { get; set; } + + /// + /// Gets or sets the error on pattern mismatch. + /// + [DataMember(EmitDefaultValue = false)] + public String PatternMismatchErrorMessage { get; set; } + + /// + /// Gets or sets the minimum value for this descriptor. + /// + [DataMember(EmitDefaultValue = false)] + public Decimal? MinValue { get; set; } + + /// + /// Gets or sets the maximum value for this descriptor. + /// + [DataMember(EmitDefaultValue = false)] + public Decimal? MaxValue { get; set; } + + /// + /// Gets or sets the minimum length of this descriptor. + /// + [DataMember(EmitDefaultValue = false)] + public Int32?
MinLength { get; set; } + + /// + /// Gets or sets the maximum length of this descriptor. + /// + [DataMember(EmitDefaultValue = false)] + public Int32? MaxLength { get; set; } + + public void SetSecuredObjectProperties(Guid namespaceId, int requiredPermissions, string token) + { + this.m_namespaceId = namespaceId; + this.m_requiredPermissions = requiredPermissions; + this.m_token = token; + } + + public Guid NamespaceId => m_namespaceId; + + public int RequiredPermissions => m_requiredPermissions; + + public string GetToken() + { + return m_token; + } + + private Guid m_namespaceId; + private int m_requiredPermissions; + private string m_token; + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputValues.cs b/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputValues.cs new file mode 100644 index 00000000000..1ae8d80bca3 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/FormInput/InputValues.cs @@ -0,0 +1,166 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Services.FormInput +{ + /// + /// Information about the possible/allowed values for a given subscription input + /// + [DataContract] + public class InputValues : ISecuredObject + { + /// + /// The id of the input + /// + [DataMember(EmitDefaultValue = false)] + public String InputId { get; set; } + + /// + /// The default value to use for this input + /// + [DataMember(EmitDefaultValue = false)] + public String DefaultValue { get; set; } + + /// + /// Possible values that this input can take + /// + [DataMember(EmitDefaultValue = false)] + public IList PossibleValues { get; set; } + + /// + /// Should the value be restricted to one of the values in the PossibleValues (True) + /// or are the values in PossibleValues just a suggestion (False) + /// + [DataMember(EmitDefaultValue = false)] + public Boolean IsLimitedToPossibleValues { get; set; } + + /// + /// Should this input be disabled + /// + [DataMember(EmitDefaultValue = false)] + public Boolean IsDisabled { get; set; } + + /// + /// Should this input be made read-only + /// + [DataMember(EmitDefaultValue = false)] + public Boolean IsReadOnly { get; set; } + + /// + /// Errors encountered while computing dynamic values. 
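+ /// A null value is omitted from the serialized payload (EmitDefaultValue is false), so this member is only present when computing dynamic values actually failed.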
+ /// + /// + [DataMember(EmitDefaultValue = false)] + public InputValuesError Error { get; set; } + + public void SetSecuredObjectProperties(Guid namespaceId, Int32 requiredPermissions, String token) + { + this.m_namespaceId = namespaceId; + this.m_requiredPermissions = requiredPermissions; + this.m_token = token; + + this.Error?.SetSecuredObjectProperties(namespaceId, requiredPermissions, token); + if (this.PossibleValues != null && this.PossibleValues.Any()) + { + foreach (var value in this.PossibleValues) + { + value.SetSecuredObjectProperties(namespaceId, requiredPermissions, token); + } + } + } + + public Guid NamespaceId => m_namespaceId; + + public Int32 RequiredPermissions => m_requiredPermissions; + + public String GetToken() + { + return m_token; + } + + private Guid m_namespaceId; + private Int32 m_requiredPermissions; + private String m_token; + } + + /// + /// Information about a single value for an input + /// + [DataContract] + public class InputValue : ISecuredObject + { + /// + /// The value to store for this input + /// + [DataMember] + public String Value { get; set; } + + /// + /// The text to show for the display of this value + /// + [DataMember(EmitDefaultValue = false)] + public String DisplayValue { get; set; } + + /// + /// Any other data about this input + /// + [DataMember(EmitDefaultValue = false)] + public IDictionary Data { get; set; } + + public void SetSecuredObjectProperties(Guid namespaceId, Int32 requiredPermissions, String token) + { + this.m_namespaceId = namespaceId; + this.m_requiredPermissions = requiredPermissions; + this.m_token = token; + } + + public Guid NamespaceId => m_namespaceId; + + public Int32 RequiredPermissions => m_requiredPermissions; + + public String GetToken() + { + return m_token; + } + + private Guid m_namespaceId; + private Int32 m_requiredPermissions; + private String m_token; + } + + /// + /// Error information related to a subscription input value. + /// + [DataContract] + public class InputValuesError : ISecuredObject + { + /// + /// The error message. 
+ /// + [DataMember] + public String Message { get; set; } + + public void SetSecuredObjectProperties(Guid namespaceId, Int32 requiredPermissions, String token) + { + this.m_namespaceId = namespaceId; + this.m_requiredPermissions = requiredPermissions; + this.m_token = token; + } + + public Guid NamespaceId => m_namespaceId; + + public Int32 RequiredPermissions => m_requiredPermissions; + + public String GetToken() + { + return m_token; + } + + private Guid m_namespaceId; + private Int32 m_requiredPermissions; + private String m_token; + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphGroup.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphGroup.cs new file mode 100644 index 00000000000..e790bd5cf1e --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphGroup.cs @@ -0,0 +1,171 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using IdentityDescriptor = GitHub.Services.Identity.IdentityDescriptor; + +namespace GitHub.Services.Graph.Client +{ + /// + /// Graph group entity + /// + [DataContract] + public class GraphGroup : GraphMember + { + public override string SubjectKind => Constants.SubjectKind.Group; + + /// + /// A short phrase to help human readers disambiguate groups with similar names + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public string Description { get; private set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false), EditorBrowsable(EditorBrowsableState.Never), ClientInternalUseOnly] + internal string SpecialType { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual bool ShouldSerializeSpecialType() => ShoudSerializeInternals; + + [DataMember(IsRequired = false, EmitDefaultValue = false), EditorBrowsable(EditorBrowsableState.Never), ClientInternalUseOnly] + internal Guid ScopeId { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual bool ShouldSerializeScopeId() => ShoudSerializeInternals; + + [DataMember(IsRequired = false, EmitDefaultValue = false), EditorBrowsable(EditorBrowsableState.Never), ClientInternalUseOnly] + internal string ScopeType { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual bool ShouldSerializeScopeType() => ShoudSerializeInternals; + + [DataMember(IsRequired = false, EmitDefaultValue = false), EditorBrowsable(EditorBrowsableState.Never), ClientInternalUseOnly] + internal string ScopeName { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual bool ShouldSerializeScopeName() => ShoudSerializeInternals; + + [DataMember(IsRequired = false, EmitDefaultValue = false), EditorBrowsable(EditorBrowsableState.Never), ClientInternalUseOnly] + internal Guid LocalScopeId { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual bool ShouldSerializeLocalScopeId() => ShoudSerializeInternals; + + [DataMember(IsRequired = false, EmitDefaultValue = false), EditorBrowsable(EditorBrowsableState.Never), ClientInternalUseOnly] + internal Guid SecuringHostId { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual bool ShouldSerializeSecuringHostId() => ShoudSerializeInternals; + + [DataMember(IsRequired = false, EmitDefaultValue = false), EditorBrowsable(EditorBrowsableState.Never), ClientInternalUseOnly] + internal bool IsRestrictedVisible { get; private set; } + + 
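+ // NOTE: the ShouldSerializeXxx() methods on this type follow Newtonsoft.Json's conditional-serialization
+ // convention: for a member named Xxx, a public bool ShouldSerializeXxx() method, when present, decides whether
+ // that member is written out. Each of these methods returns ShoudSerializeInternals (false by default), so the
+ // ClientInternalUseOnly members are normally omitted from the JSON. Rough sketch of the effect, assuming a
+ // GraphGroup instance named "group" (the flag is internal, so only code inside this SDK can set it):
+ //
+ //     var json = JsonConvert.SerializeObject(group);   // internal scope/special-type fields omitted
+ //     group.ShoudSerializeInternals = true;            // opt in (internal field)
+ //     json = JsonConvert.SerializeObject(group);       // internal fields now included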
[EditorBrowsable(EditorBrowsableState.Never)] + public virtual bool ShouldSerializeIsRestrictedVisible() => ShoudSerializeInternals; + + [DataMember(IsRequired = false, EmitDefaultValue = false), EditorBrowsable(EditorBrowsableState.Never), ClientInternalUseOnly] + internal bool IsCrossProject { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual bool ShouldSerializeIsIsCrossProject() => ShoudSerializeInternals; + + [DataMember(IsRequired = false, EmitDefaultValue = false), EditorBrowsable(EditorBrowsableState.Never), ClientInternalUseOnly] + internal bool IsGlobalScope { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual bool ShouldSerializeIsGlobalScope() => ShoudSerializeInternals; + + [DataMember(IsRequired = false, EmitDefaultValue = false), EditorBrowsable(EditorBrowsableState.Never), ClientInternalUseOnly] + internal bool IsDeleted { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + internal GraphGroup( + string origin, + string originId, + SubjectDescriptor descriptor, + IdentityDescriptor legacyDescriptor, + string displayName, + ReferenceLinks links, + string url, + string domain, + string principalName, + string mailAddress, + string description, + string specialType, + Guid scopeId, + string scopeType, + string scopeName, + Guid localScopeId, + Guid securingHostId, + bool isRestrictedVisible, + bool isCrossProject, + bool isGlobalScope, + bool isDeleted) + : base(origin, originId, descriptor, legacyDescriptor, displayName, links, url, domain, principalName, mailAddress) + { + Description = description; + SpecialType = specialType; + ScopeId = scopeId; + ScopeType = scopeType; + ScopeName = scopeName; + LocalScopeId = localScopeId; + SecuringHostId = securingHostId; + IsRestrictedVisible = isRestrictedVisible; + IsCrossProject = isCrossProject; + IsGlobalScope = isGlobalScope; + IsDeleted = isDeleted; + } + + // this is how we replace/overwrite parameters and create a new object + // and keep our internal objects immutable + [EditorBrowsable(EditorBrowsableState.Never)] + internal GraphGroup( + GraphGroup group, + string origin = null, + string originId = null, + SubjectDescriptor? descriptor = null, + IdentityDescriptor legacyDescriptor = null, + string displayName = null, + ReferenceLinks links = null, + string url = null, + string domain = null, + string principalName = null, + string mailAddress = null, + string description = null, + string specialType = null, + Guid? scopeId = null, + string scopeType = null, + string scopeName = null, + Guid? localScopeId = null, + Guid? securingHostId = null, + bool? isRestrictedVisible = null, + bool? isCrossProject = null, + bool? isGlobalScope = null, + bool? isDeleted = null) + : this(origin ?? group?.Origin, + originId ?? group?.OriginId, + descriptor ?? group?.Descriptor ?? default(SubjectDescriptor), + legacyDescriptor ?? group?.LegacyDescriptor ?? default(IdentityDescriptor), + displayName ?? group?.DisplayName, + links ?? group?.Links, + url ?? group?.Url, + domain ?? group?.Domain, + principalName ?? group?.PrincipalName, + mailAddress ?? group?.MailAddress, + description ?? group?.Description, + specialType ?? group?.SpecialType, + scopeId ?? group?.ScopeId ?? default(Guid), + scopeType ?? group?.ScopeType, + scopeName ?? group?.ScopeName, + localScopeId ?? group?.LocalScopeId ?? default(Guid), + securingHostId ?? group?.SecuringHostId ?? default(Guid), + isRestrictedVisible ?? group?.IsRestrictedVisible ?? 
default(bool), + isCrossProject ?? group?.IsCrossProject ?? default(bool), + isGlobalScope ?? group?.IsGlobalScope ?? default(bool), + isDeleted ?? group?.IsDeleted ?? default(bool)) + { } + + // only for serialization + protected GraphGroup() { } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphGroupCreationContext.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphGroupCreationContext.cs new file mode 100644 index 00000000000..515111d3a68 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphGroupCreationContext.cs @@ -0,0 +1,105 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using Newtonsoft.Json; + +namespace GitHub.Services.Graph.Client +{ + /// + /// Do not attempt to use this type to create a new group. This + /// type does not contain sufficient fields to create a new group. + /// + [DataContract] + [JsonConverter(typeof(GraphGroupCreationContextJsonConverter))] + public abstract class GraphGroupCreationContext + { + /// + /// Optional: If provided, we will use this identifier for the storage key of the created group + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid StorageKey { get; set; } + } + + /// + /// Use this type to create a new group using the OriginID as a reference to an existing group from an external + /// AD or AAD backed provider. This is the subset of GraphGroup fields required for creation of + /// a group for the AD and AAD use case. + /// + [DataContract] + public class GraphGroupOriginIdCreationContext : GraphGroupCreationContext + { + /// + /// This should be the object id or sid of the group from the source AD or AAD provider. + /// Example: d47d025a-ce2f-4a79-8618-e8862ade30dd + /// Team Services will communicate with the source provider to fill all other fields on creation. + /// + [DataMember(IsRequired = true)] + public string OriginId { get; set; } + } + + /// + /// Use this type to create a new group using the mail address as a reference to an existing group from an external + /// AD or AAD backed provider. This is the subset of GraphGroup fields required for creation of + /// a group for the AAD and AD use case. + /// + [DataContract] + public class GraphGroupMailAddressCreationContext : GraphGroupCreationContext + { + /// + /// This should be the mail address or the group in the source AD or AAD provider. + /// Example: jamal@contoso.com + /// Team Services will communicate with the source provider to fill all other fields on creation. + /// + [DataMember(IsRequired = true)] + public string MailAddress { get; set; } + } + + /// + /// Use this type to create a new Vsts group that is not backed by an external provider. + /// + [DataContract] + public class GraphGroupVstsCreationContext : GraphGroupCreationContext + { + /// + /// Used by VSTS groups; if set this will be the group DisplayName, otherwise ignored + /// + [DataMember(IsRequired = true)] + public string DisplayName { get; set; } + + /// + /// Used by VSTS groups; if set this will be the group description, otherwise ignored + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public string Description { get; set; } + + /// + /// Internal use only. An optional sid to use for group creation. 
+ /// + public SubjectDescriptor Descriptor { get; set; } + + + [DataMember(Name = "Descriptor", IsRequired = false, EmitDefaultValue = false)] + private string DescriptorString + { + get { return Descriptor.ToString(); } + set { Descriptor = SubjectDescriptor.FromString(value); } + } + /// + /// For internal use only in back compat scenarios. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public bool CrossProject { get; set; } + + /// + /// For internal use only in back compat scenarios. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public bool RestrictedVisibility { get; set; } + + /// + /// For internal use only in back compat scenarios. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public string SpecialGroupType { get; set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphGroupCreationContextJsonConverter.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphGroupCreationContextJsonConverter.cs new file mode 100644 index 00000000000..a605270d052 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphGroupCreationContextJsonConverter.cs @@ -0,0 +1,46 @@ +using System; +using System.Linq; +using Newtonsoft.Json.Linq; +using GitHub.Services.WebApi; + +namespace GitHub.Services.Graph.Client +{ + public class GraphGroupCreationContextJsonConverter : VssJsonCreationConverter + { + protected override GraphGroupCreationContext Create(Type objectType, JObject jsonObject) + { + // enforce origin id or principalname or displayName + var hasOriginId = jsonObject["originId"] != null; + var hasMailAddress = jsonObject["mailAddress"] != null; + var hasDisplayName = jsonObject["displayName"] != null; + var requiredFields = new bool[] + { + hasOriginId, + hasDisplayName, + hasMailAddress + }; + + if (requiredFields.Count(b => b) > 1) + { + throw new ArgumentNullException(WebApiResources.GraphGroupMissingRequiredFields()); + } + + if (hasOriginId) + { + return new GraphGroupOriginIdCreationContext(); + } + + if (hasMailAddress) + { + return new GraphGroupMailAddressCreationContext(); + } + + if (hasDisplayName) + { + return new GraphGroupVstsCreationContext(); + } + + throw new ArgumentException(WebApiResources.GraphGroupMissingRequiredFields()); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphMember.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphMember.cs new file mode 100644 index 00000000000..4e150be7d28 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphMember.cs @@ -0,0 +1,58 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using IdentityDescriptor = GitHub.Services.Identity.IdentityDescriptor; + +namespace GitHub.Services.Graph.Client +{ + [DataContract] + public abstract class GraphMember : GraphSubject + { + /// + /// This represents the name of the container of origin for a graph member. + /// (For MSA this is "Windows Live ID", for AD the name of the domain, for AAD the + /// tenantID of the directory, for VSTS groups the ScopeId, etc) + /// + [DataMember(IsRequired = false, EmitDefaultValue = true)] + public string Domain { get; private set; } + + /// + /// This is the PrincipalName of this graph member from the source provider. The source + /// provider may change this field over time and it is not guaranteed to be immutable + /// for the life of the graph member by VSTS. 
+ /// + [DataMember] + public string PrincipalName { get; private set; } + + /// + /// The email address of record for a given graph member. This may be different + /// than the principal name. + /// + [DataMember(IsRequired = false, EmitDefaultValue = true)] + public string MailAddress { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + protected GraphMember( + string origin, + string originId, + SubjectDescriptor descriptor, + IdentityDescriptor legacyDescriptor, + string displayName, + ReferenceLinks links, + string url, + string domain, + string principalName, + string mailAddress) + : base(origin, originId, descriptor, legacyDescriptor, displayName, links, url) + { + Domain = domain; + PrincipalName = principalName; + MailAddress = mailAddress; + } + + // only for serialization + protected GraphMember() { } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphScope.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphScope.cs new file mode 100644 index 00000000000..362d295975f --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphScope.cs @@ -0,0 +1,138 @@ +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using GroupScopeType = GitHub.Services.Identity.GroupScopeType; +using IdentityDescriptor = GitHub.Services.Identity.IdentityDescriptor; + +namespace GitHub.Services.Graph.Client +{ + /// + /// Container where a graph entity is defined (organization, project, team) + /// + [DataContract] + public class GraphScope : GraphSubject + { + public override string SubjectKind => Constants.SubjectKind.Scope; + + /// + /// The subject descriptor that references the administrators group for this scope. Only + /// members of this group can change the contents of this scope or assign other users + /// permissions to access this scope. + /// + public SubjectDescriptor AdministratorDescriptor { get; private set; } + + /// + /// The subject descriptor that references the administrators group for this scope. Only + /// members of this group can change the contents of this scope or assign other users + /// permissions to access this scope. + /// + [DataMember(Name = "AdministratorDescriptor", IsRequired = false, EmitDefaultValue = false)] + private string AdministratorString + { + get { return AdministratorDescriptor.ToString(); } + set { AdministratorDescriptor = SubjectDescriptor.FromString(value); } + } + + /// + /// When true, this scope is also a securing host for one or more scopes. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public bool IsGlobal { get; private set; } + + /// + /// The subject descriptor of the parent scope. + /// + public SubjectDescriptor ParentDescriptor { get; private set; } + + /// + /// The subject descriptor for the closest account or organization in the + /// ancestor tree of this scope. + /// + [DataMember(Name = "ParentDescriptor", IsRequired = false, EmitDefaultValue = false)] + private string ParentDescriptorString + { + get { return ParentDescriptor.ToString(); } + set { ParentDescriptor = SubjectDescriptor.FromString(value); } + } + + /// + /// The subject descriptor for the containing organization in the ancestor tree + /// of this scope. + /// + public SubjectDescriptor SecuringHostDescriptor { get; private set; } + + /// + /// The subject descriptor for the containing organization in the ancestor tree + /// of this scope. 
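+ /// As with the other descriptor properties on this type, the value round-trips through the private string-typed wrapper below, so SubjectDescriptor is serialized in its string form.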
+ /// + [DataMember(Name = "SecuringHostDescriptor", IsRequired = false, EmitDefaultValue = false)] + private string SecuringHostDescriptorString + { + get { return SecuringHostDescriptor.ToString(); } + set { SecuringHostDescriptor = SubjectDescriptor.FromString(value); } + } + + /// + /// The type of this scope. Typically ServiceHost or TeamProject. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public GroupScopeType ScopeType { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + internal GraphScope( + string origin, + string originId, + SubjectDescriptor descriptor, + IdentityDescriptor legacyDescriptor, + string displayName, + ReferenceLinks links, + string url, + SubjectDescriptor administratorDescriptor, + bool isGlobal, + SubjectDescriptor parentDescriptor, + SubjectDescriptor securingHostDescriptor, + GroupScopeType scopeType = GroupScopeType.Generic) + : base(origin, originId, descriptor, legacyDescriptor, displayName, links, url) + { + AdministratorDescriptor = administratorDescriptor; + IsGlobal = isGlobal; + ParentDescriptor = parentDescriptor; + SecuringHostDescriptor = securingHostDescriptor; + ScopeType = scopeType; + } + + // this is how we replace/overwrite parameters and create a new object + // and keep our internal objects immutable + internal GraphScope( + GraphScope scope, + string origin = null, + string originId = null, + SubjectDescriptor? descriptor = null, + IdentityDescriptor legacyDescriptor = null, + string displayName = null, + ReferenceLinks links = null, + string url = null, + SubjectDescriptor? administrator = null, + bool? isGlobal = null, + SubjectDescriptor? parentDescriptor = null, + SubjectDescriptor? securingHostDescriptor = null, + GroupScopeType? scopeType = GroupScopeType.Generic) + : this(origin ?? scope?.Origin, + originId ?? scope?.OriginId, + descriptor ?? scope?.Descriptor ?? default(SubjectDescriptor), + legacyDescriptor ?? scope?.LegacyDescriptor ?? default(IdentityDescriptor), + displayName ?? scope?.DisplayName, + links ?? scope?.Links, + url ?? scope?.Url, + administrator ?? scope?.AdministratorDescriptor ?? default(SubjectDescriptor), + isGlobal ?? scope?.IsGlobal ?? default(bool), + parentDescriptor ?? scope?.ParentDescriptor ?? default(SubjectDescriptor), + securingHostDescriptor ?? scope?.SecuringHostDescriptor ?? default(SubjectDescriptor), + scopeType ?? scope?.ScopeType ?? default(GroupScopeType)) + { } + + // only for serialization + protected GraphScope() { } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSubject.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSubject.cs new file mode 100644 index 00000000000..fc48a5bd91c --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSubject.cs @@ -0,0 +1,78 @@ +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using Newtonsoft.Json; +using IdentityDescriptor = GitHub.Services.Identity.IdentityDescriptor; + +namespace GitHub.Services.Graph.Client +{ + /// + /// Top-level graph entity + /// + [DataContract] + [JsonConverter(typeof(GraphSubjectJsonConverter))] + public abstract class GraphSubject : GraphSubjectBase + { + /// + /// This field identifies the type of the graph subject (ex: Group, Scope, User). 
+ /// + [DataMember] + public abstract string SubjectKind { get; } + + /// + /// The type of source provider for the origin identifier (ex:AD, AAD, MSA) + /// + [DataMember] + public string Origin { get; private set; } + + /// + /// The unique identifier from the system of origin. Typically a sid, object id or Guid. Linking + /// and unlinking operations can cause this value to change for a user because the user is not + /// backed by a different provider and has a different unique id in the new provider. + /// + [DataMember] + public string OriginId { get; private set; } + + /// + /// [Internal Use Only] The legacy descriptor is here in case you need to access old version IMS using identity descriptor. + /// + [ClientInternalUseOnly] + internal IdentityDescriptor LegacyDescriptor { get; private set; } + + /// + /// [Internal Use Only] The legacy descriptor is here in case you need to access old version IMS using identity descriptor. + /// + [DataMember(Name = "LegacyDescriptor", IsRequired = false, EmitDefaultValue = false)] + private string LegacyDescriptorString + { + get { return LegacyDescriptor?.ToString(); } + set { LegacyDescriptor = IdentityDescriptor.FromString(value); } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual bool ShouldSerializeLegacyDescriptorString() => ShoudSerializeInternals; + + [ClientInternalUseOnly] + internal bool ShoudSerializeInternals; + + // only for serialization + protected GraphSubject() { } + + [EditorBrowsable(EditorBrowsableState.Never)] + protected GraphSubject( + string origin, + string originId, + SubjectDescriptor descriptor, + IdentityDescriptor legacyDescriptor, + string displayName, + ReferenceLinks links, + string url) : base(descriptor, displayName, links, url) + { + Origin = origin; + OriginId = originId; + LegacyDescriptor = legacyDescriptor; + ShoudSerializeInternals = false; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSubjectBase.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSubjectBase.cs new file mode 100644 index 00000000000..71d1cd09507 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSubjectBase.cs @@ -0,0 +1,80 @@ +using System.ComponentModel; +using System.Runtime.Serialization; +using System.Xml; +using System.Xml.Schema; +using System.Xml.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Xml; +using Newtonsoft.Json; + +namespace GitHub.Services.Graph.Client +{ + [JsonObject(MemberSerialization = MemberSerialization.OptIn)] + [XmlSerializableDataContract] + public abstract class GraphSubjectBase : IXmlSerializable + { + /// The descriptor is the primary way to reference the graph subject while the system is running. This field + /// will uniquely identify the same graph subject across both Accounts and Organizations. + /// + public SubjectDescriptor Descriptor { get; protected set; } + + /// + /// The descriptor is the primary way to reference the graph subject while the system is running. This field + /// will uniquely identify the same graph subject across both Accounts and Organizations. + /// + [DataMember(Name = "Descriptor", EmitDefaultValue = false)] + [JsonProperty(PropertyName = "Descriptor", DefaultValueHandling = DefaultValueHandling.Ignore)] + private string DescriptorString + { + get { return Descriptor.ToString(); } + set { Descriptor = SubjectDescriptor.FromString(value); } + } + + /// + /// This is the non-unique display name of the graph subject. 
To change this field, you must alter its value in the + /// source provider. + /// + [DataMember] + [JsonProperty] + public string DisplayName { get; protected set; } + + /// + /// This field contains zero or more interesting links about the graph subject. These links may be invoked to obtain additional + /// relationships or more detailed information about this graph subject. + /// + [DataMember(Name = "_links", EmitDefaultValue = false)] + [JsonProperty(PropertyName = "_links", DefaultValueHandling = DefaultValueHandling.Ignore)] + [XmlIgnore] // ReferenceLinks type does not currently support XML serialization (#1164908 for tracking) + public ReferenceLinks Links { get; protected set; } + + /// + /// This url is the full route to the source resource of this graph subject. + /// + [DataMember(EmitDefaultValue = false)] + [JsonProperty(DefaultValueHandling = DefaultValueHandling.Ignore)] + public string Url { get; protected set; } + + // only for serialization + protected GraphSubjectBase() { } + + [EditorBrowsable(EditorBrowsableState.Never)] + protected GraphSubjectBase( + SubjectDescriptor descriptor, + string displayName, + ReferenceLinks links, + string url) + { + Descriptor = descriptor; + DisplayName = displayName; + Links = links; + Url = url; + } + + XmlSchema IXmlSerializable.GetSchema() { return null; } + + void IXmlSerializable.ReadXml(XmlReader reader) => reader.ReadDataMemberXml(this); + + void IXmlSerializable.WriteXml(XmlWriter writer) => writer.WriteDataMemberXml(this); + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSubjectJsonConverter.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSubjectJsonConverter.cs new file mode 100644 index 00000000000..0953b71863d --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSubjectJsonConverter.cs @@ -0,0 +1,42 @@ +using System; +using System.Linq; +using System.Reflection; +using GitHub.Services.WebApi; +using Newtonsoft.Json.Linq; + +namespace GitHub.Services.Graph.Client +{ + public class GraphSubjectJsonConverter : VssJsonCreationConverter + { + protected override GraphSubject Create(Type objectType, JObject jsonObject) + { + var subjectKindObject = jsonObject.GetValue(nameof(GraphSubject.SubjectKind), StringComparison.OrdinalIgnoreCase); + if (subjectKindObject == null) + { + throw new ArgumentException(WebApiResources.UnknownEntityType(subjectKindObject)); + } + var typeName = subjectKindObject.ToString(); + switch (typeName) + { + case Constants.SubjectKind.Group: + var groupInfo = typeof(GraphGroup).GetTypeInfo(); + var graphGroupConstructor = groupInfo.DeclaredConstructors.First(x => x.GetParameters().Length == 0); + return (GraphGroup)graphGroupConstructor.Invoke(null); + case Constants.SubjectKind.Scope: + var scopeInfo = typeof(GraphScope).GetTypeInfo(); + var graphScopeConstructor = scopeInfo.DeclaredConstructors.First(x => x.GetParameters().Length == 0); + return (GraphScope)graphScopeConstructor.Invoke(null); + case Constants.SubjectKind.User: + var userInfo = typeof(GraphUser).GetTypeInfo(); + var graphUserConstructor = userInfo.DeclaredConstructors.First(x => x.GetParameters().Length == 0); + return (GraphUser)graphUserConstructor.Invoke(null); + case Constants.SubjectKind.SystemSubject: + var systemSubjectInfo = typeof(GraphSystemSubject).GetTypeInfo(); + var graphSystemSubjectConstructor = systemSubjectInfo.DeclaredConstructors.First(x => x.GetParameters().Length == 0); + return (GraphSystemSubject)graphSystemSubjectConstructor.Invoke(null); + default: + throw new 
ArgumentException(WebApiResources.UnknownEntityType(typeName)); + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSystemSubject.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSystemSubject.cs new file mode 100644 index 00000000000..7a26f2e37c3 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphSystemSubject.cs @@ -0,0 +1,30 @@ +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using IdentityDescriptor = GitHub.Services.Identity.IdentityDescriptor; + +namespace GitHub.Services.Graph.Client +{ + [DataContract] + public class GraphSystemSubject : GraphSubject + { + public override string SubjectKind => Constants.SubjectKind.SystemSubject; + + [EditorBrowsable(EditorBrowsableState.Never)] + internal GraphSystemSubject( + string origin, + string originId, + SubjectDescriptor descriptor, + IdentityDescriptor legacyDescriptor, + string displayName, + ReferenceLinks links, + string url) + : base(origin, originId, descriptor, legacyDescriptor, displayName, links, url) + { + } + + // only for serialization + protected GraphSystemSubject() { } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUser.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUser.cs new file mode 100644 index 00000000000..5dd2cc866fe --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUser.cs @@ -0,0 +1,107 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using IdentityDescriptor = GitHub.Services.Identity.IdentityDescriptor; + +namespace GitHub.Services.Graph.Client +{ + /// + /// Graph user entity + /// + [DataContract] + public class GraphUser : GraphMember + { + public override string SubjectKind => Constants.SubjectKind.User; + + /// + /// The meta type of the user in the origin, such as "member", "guest", etc. + /// See for the set of possible values. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public string MetaType { get; private set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false), ClientInternalUseOnly] + internal DateTime MetadataUpdateDate { get; private set; } + + /// + /// The short, generally unique name for the user in the backing directory. + /// For AAD users, this corresponds to the mail nickname, which is often but not necessarily similar + /// to the part of the user's mail address before the @ sign. + /// For GitHub users, this corresponds to the GitHub user handle. 
+ /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public string DirectoryAlias { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + public virtual bool ShouldSerializeMetadataUpdateDate() => ShoudSerializeInternals; + + /// + /// When true, the group has been deleted in the identity provider + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public bool IsDeletedInOrigin { get; private set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + internal GraphUser( + string origin, + string originId, + SubjectDescriptor descriptor, + IdentityDescriptor legacyDescriptor, + string displayName, + ReferenceLinks links, + string url, + string domain, + string principalName, + string mailAddress, + string metaType, + DateTime metadataUpdateDate, + bool isDeletedInOrigin, + string directoryAlias) + : base(origin, originId, descriptor, legacyDescriptor, displayName, links, url, domain, principalName, mailAddress) + { + MetaType = metaType; + MetadataUpdateDate = metadataUpdateDate; + IsDeletedInOrigin = isDeletedInOrigin; + DirectoryAlias = directoryAlias; + } + + // this is how we replace/overwrite parameters and create a new object + // and keep our internal objects immutable + internal GraphUser( + GraphUser user, + string origin = null, + string originId = null, + SubjectDescriptor? descriptor = null, + IdentityDescriptor legacyDescriptor = null, + string displayName = null, + ReferenceLinks links = null, + string url = null, + string domain = null, + string principalName = null, + string mailAddress = null, + string metaType = null, + DateTime? metadataUpdateDate = null, + bool? isDeletedInOrigin = false, + string directoryAlias = null) + : this(origin ?? user?.Origin, + originId ?? user?.OriginId, + descriptor ?? user?.Descriptor ?? default(SubjectDescriptor), + legacyDescriptor ?? user?.LegacyDescriptor ?? default(IdentityDescriptor), + displayName ?? user?.DisplayName, + links ?? user?.Links, + url ?? user?.Url, + domain ?? user?.Domain, + principalName ?? user?.PrincipalName, + mailAddress ?? user?.MailAddress, + metaType ?? user?.MetaType, + metadataUpdateDate ?? user?.MetadataUpdateDate ?? DateTime.MinValue, + isDeletedInOrigin ?? user?.IsDeletedInOrigin ?? default, + directoryAlias ?? user?.DirectoryAlias) + { } + + // only for serialization + protected GraphUser() { } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserCreationContext.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserCreationContext.cs new file mode 100644 index 00000000000..2350e8c08d1 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserCreationContext.cs @@ -0,0 +1,81 @@ +using System; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.Services.Graph.Client +{ + /// + /// Do not attempt to use this type to create a new user. Use + /// one of the subclasses instead. This type does not contain + /// sufficient fields to create a new user. + /// + [DataContract] + [JsonConverter(typeof(GraphUserCreationContextJsonConverter))] + public abstract class GraphUserCreationContext + { + /// + /// Optional: If provided, we will use this identifier for the storage key of the created user + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid StorageKey { get; set; } + } + + /// + /// Use this type to create a new user using the OriginID as a reference to an existing user from an external + /// AD or AAD backed provider. 
This is the subset of GraphUser fields required for creation of + /// a GraphUser for the AD and AAD use case when looking up the user by its unique ID in the backing provider. + /// + [DataContract] + public class GraphUserOriginIdCreationContext : GraphUserCreationContext + { + /// + /// This should be the object id or sid of the user from the source AD or AAD provider. + /// Example: d47d025a-ce2f-4a79-8618-e8862ade30dd + /// Team Services will communicate with the source provider to fill all other fields on creation. + /// + [DataMember(IsRequired = true)] + public string OriginId { get; set; } + + + /// + /// This should be the name of the origin provider. + /// Example: github.com + /// + [DataMember(IsRequired = false)] + public string Origin { get; set; } + } + + /// + /// Use this type to create a new user using the principal name as a reference to an existing user from an external + /// AD or AAD backed provider. This is the subset of GraphUser fields required for creation of + /// a GraphUser for the AD and AAD use case when looking up the user by its principal name in the backing provider. + /// + [DataContract] + public class GraphUserPrincipalNameCreationContext : GraphUserCreationContext + { + /// + /// This should be the principal name or upn of the user in the source AD or AAD provider. + /// Example: jamal@contoso.com + /// Team Services will communicate with the source provider to fill all other fields on creation. + /// + [DataMember(IsRequired = true)] + public string PrincipalName { get; set; } + } + + /// + /// Use this type to create a new user using the mail address as a reference to an existing user from an external + /// AD or AAD backed provider. This is the subset of GraphUser fields required for creation of + /// a GraphUser for the AD and AAD use case when looking up the user by its mail address in the backing provider. + /// + [DataContract] + public class GraphUserMailAddressCreationContext : GraphUserCreationContext + { + /// + /// This should be the mail address of the user in the source AD or AAD provider. + /// Example: Jamal.Hartnett@contoso.com + /// Team Services will communicate with the source provider to fill all other fields on creation. 
+ /// + [DataMember(IsRequired = true)] + public string MailAddress { get; set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserCreationContextJsonConverter.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserCreationContextJsonConverter.cs new file mode 100644 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserCreationContextJsonConverter.cs +using System; +using System.Linq; +using Newtonsoft.Json.Linq; +using GitHub.Services.WebApi; + +namespace GitHub.Services.Graph.Client +{ + public class GraphUserCreationContextJsonConverter : VssJsonCreationConverter + { + protected override GraphUserCreationContext Create(Type objectType, JObject jsonObject) + { + // enforce exactly one of originId, principalName or mailAddress + var hasOriginId = jsonObject["originId"] != null; + var hasPrincipalName = jsonObject["principalName"] != null; + var hasMailAddress = jsonObject["mailAddress"] != null; + var requiredFields = new bool[] + { + hasOriginId, + hasPrincipalName, + hasMailAddress, + }; + + if (requiredFields.Count(b => b) != 1) + { + throw new ArgumentException(WebApiResources.GraphUserMissingRequiredFields()); + } + + if (hasOriginId) + { + return new GraphUserOriginIdCreationContext(); + } + + if (hasPrincipalName) + { + return new GraphUserPrincipalNameCreationContext(); + } + + if (hasMailAddress) + { + return new GraphUserMailAddressCreationContext(); + } + + throw new ArgumentException(WebApiResources.GraphUserMissingRequiredFields()); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserUpdateContext.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserUpdateContext.cs new file mode 100644 index 00000000000..9a50f3aaf07 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserUpdateContext.cs @@ -0,0 +1,45 @@ +using System; +using System.Runtime.Serialization; +using Newtonsoft.Json; +using System.ComponentModel; + +namespace GitHub.Services.Graph.Client +{ + /// + /// Do not attempt to use this type to update a user. Use + /// one of the subclasses instead. This type does not contain + /// sufficient fields to update a user. + /// + [DataContract] + [JsonConverter(typeof(GraphUserUpdateContextJsonConverter))] + public abstract class GraphUserUpdateContext + { + /// + /// Storage key should not be specified in case of updating user + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [EditorBrowsable(EditorBrowsableState.Never), Obsolete()] + public Guid StorageKey { get; set; } + + //Currently there's a bug on the client generator that if a class doesn't have data member, it wouldn't get generated + //We're adding a temporary data member here in order to get past that issue + //BUG 1466336 has been created to track this issue. Once the bug is fixed, we'll remove this data member. + //Marking it as obsolete and never used for now to ensure no one can access + } + /// + /// Use this type to update an existing user using the OriginID as a reference to an existing user from an external + /// AD or AAD backed provider. This is the subset of GraphUser fields required for creation of + /// a GraphUser for the AD and AAD use case when looking up the user by its unique ID in the backing provider. + /// + [DataContract] + public class GraphUserOriginIdUpdateContext : GraphUserUpdateContext + { + /// + /// This should be the object id or sid of the user from the source AD or AAD provider. + /// Example: d47d025a-ce2f-4a79-8618-e8862ade30dd + /// Azure DevOps will communicate with the source provider to fill all other fields on creation.
+ /// + [DataMember(IsRequired = true)] + public string OriginId { get; set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserUpdateContextJsonConverter.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserUpdateContextJsonConverter.cs new file mode 100644 index 00000000000..8590042367a --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Client/GraphUserUpdateContextJsonConverter.cs @@ -0,0 +1,33 @@ +using System; +using System.Linq; +using Newtonsoft.Json.Linq; +using GitHub.Services.WebApi; + +namespace GitHub.Services.Graph.Client +{ + public class GraphUserUpdateContextJsonConverter : VssJsonCreationConverter + { + protected override GraphUserUpdateContext Create(Type objectType, JObject jsonObject) + { + // enforce origin id or principalname or displayName + var hasOriginId = jsonObject["originId"] != null; + + var requiredFields = new bool[] + { + hasOriginId + }; + + if (requiredFields.Count(b => b) != 1) + { + throw new ArgumentException(WebApiResources.GraphUserMissingRequiredFields()); + } + + if (hasOriginId) + { + return new GraphUserOriginIdUpdateContext(); + } + + throw new ArgumentException(WebApiResources.GraphUserMissingRequiredFields()); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Graph/Constants.cs b/src/Sdk/WebApi/WebApi/Contracts/Graph/Constants.cs new file mode 100644 index 00000000000..5245b4426c5 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Graph/Constants.cs @@ -0,0 +1,198 @@ +using System; +using System.Collections.Generic; +using GitHub.Services.Common; + +namespace GitHub.Services.Graph +{ + public static class Constants + { + static Constants() + { + // For the normalization of incoming IdentityType strings. + // This is an optimization; it is not required that any particular IdentityType values + // appear in this list, but it helps performance to have common values here + var subjectTypeMap = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + { SubjectType.AadUser, SubjectType.AadUser }, + { SubjectType.MsaUser, SubjectType.MsaUser }, + { SubjectType.UnknownUser, SubjectType.UnknownUser }, + { SubjectType.AadGroup, SubjectType.AadGroup }, + { SubjectType.VstsGroup, SubjectType.VstsGroup }, + { SubjectType.UnknownGroup, SubjectType.UnknownGroup }, + { SubjectType.BindPendingUser, SubjectType.BindPendingUser }, + { SubjectType.WindowsIdentity, SubjectType.WindowsIdentity }, + { SubjectType.UnauthenticatedIdentity, SubjectType.UnauthenticatedIdentity }, + { SubjectType.ServiceIdentity, SubjectType.ServiceIdentity }, + { SubjectType.AggregateIdentity, SubjectType.AggregateIdentity }, + { SubjectType.ImportedIdentity, SubjectType.ImportedIdentity }, + { SubjectType.ServerTestIdentity, SubjectType.ServerTestIdentity }, + { SubjectType.GroupScopeType, SubjectType.GroupScopeType }, + { SubjectType.CspPartnerIdentity, SubjectType.CspPartnerIdentity }, + { SubjectType.SystemServicePrincipal, SubjectType.SystemServicePrincipal }, + { SubjectType.SystemLicense, SubjectType.SystemLicense }, + { SubjectType.SystemPublicAccess, SubjectType.SystemPublicAccess}, + { SubjectType.SystemAccessControl, SubjectType.SystemAccessControl }, + { SubjectType.SystemScope, SubjectType.SystemScope }, + { SubjectType.AcsServiceIdentity, SubjectType.AcsServiceIdentity }, + { SubjectType.Unknown, SubjectType.Unknown }, + }; + + SubjectTypeMap = subjectTypeMap; + + var socialTypeMap = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + { SocialType.GitHub, SocialType.GitHub }, + { SocialType.Unknown, SocialType.Unknown }, + }; + 
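            // -----------------------------------------------------------------------------
            // Illustrative sketch (editorial note, not part of this patch): the lookup maps
            // built above exist to intern well-known type strings so that later
            // case-insensitive comparisons can short-circuit on reference equality.
            // A consumer normalizing an incoming value would, under that assumption, look
            // roughly like this:
            //
            //     static string Normalize(string rawSubjectType) =>
            //         Constants.SubjectTypeMap.TryGetValue(rawSubjectType, out var known)
            //             ? known             // e.g. "AAD" -> the canonical SubjectType.AadUser instance ("aad")
            //             : rawSubjectType;   // unknown values pass through unchanged
            // -----------------------------------------------------------------------------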
+ SocialTypeMap = socialTypeMap; + } + + [GenerateSpecificConstants] + public static class SubjectKind + { + [GenerateConstant] + public const string Group = "group"; + public const string Scope = "scope"; + [GenerateConstant] + public const string User = "user"; + public const string SystemSubject = "systemSubject"; + } + + [GenerateSpecificConstants] + public static class SubjectType + { + [GenerateConstant] + public const string AadUser = "aad"; + [GenerateConstant] + public const string MsaUser = "msa"; + public const string UnknownUser = "unusr"; // user with unknown type (not add nor msa) + [GenerateConstant] + public const string AadGroup = "aadgp"; + [GenerateConstant] + public const string VstsGroup = "vssgp"; + public const string UnknownGroup = "ungrp"; // group with unknown type (not add nor vsts) + [GenerateConstant] + public const string BindPendingUser = "bnd"; + public const string WindowsIdentity = "win"; + public const string UnauthenticatedIdentity = "uauth"; + public const string ServiceIdentity = "svc"; + public const string AggregateIdentity = "agg"; + public const string ImportedIdentity = "imp"; + public const string ServerTestIdentity = "tst"; + public const string GroupScopeType = "scp"; + public const string CspPartnerIdentity = "csp"; + public const string SystemServicePrincipal = "s2s"; + public const string SystemLicense = "slic"; + public const string SystemScope = "sscp"; + public const string SystemCspPartner = "scsp"; + public const string SystemPublicAccess = "spa"; + public const string SystemAccessControl = "sace"; + public const string AcsServiceIdentity = "acs"; + public const string Unknown = "ukn"; // none of the above + } + + public static readonly IReadOnlyDictionary SubjectTypeMap; + + [GenerateSpecificConstants] + public static class SocialType + { + public const string GitHub = "ghb"; + public const string Unknown = "ukn"; + } + + public static readonly IReadOnlyDictionary SocialTypeMap; + + public static class ScopeUpdateFields + { + public const string Name = "name"; + } + + public static class GroupUpdateFields + { + public const string DisplayName = "displayName"; + public const string Description = "description"; + } + + public static class Links + { + public const string Self = "self"; + public const string Memberships = "memberships"; + public const string MembershipState = "membershipState"; + public const string StorageKey = "storageKey"; + public const string Groups = "groups"; + public const string Descriptor = "descriptor"; + public const string Subject = "subject"; + public const string Member = "member"; + public const string Conainer = "container"; + public const string Avatar = "avatar"; + } + + [GenerateSpecificConstants] + public static class OriginName + { + public const string ActiveDirectory = "ad"; + [GenerateConstant] + public const string AzureActiveDirectory = "aad"; + [GenerateConstant] + public const string MicrosoftAccount = "msa"; + [GenerateConstant] + public const string VisualStudioTeamServices = "vsts"; + [GenerateConstant] + public const string GitHubDirectory = "ghb"; + } + + public static class FederatedProviderName + { + public const string GitHub = "github.com"; + } + + public static class TraversalDepth + { + public const int Direct = 1; + public const int Expanded = -1; + } + + [GenerateSpecificConstants] + public static class UserMetaType + { + public const string Member = "member"; + [GenerateConstant] + public const string Guest = "guest"; + public const string CompanyAdministrator = 
"companyAdministrator"; + public const string HelpdeskAdministrator = "helpdeskAdministrator"; + } + + internal static class SubjectDescriptorPolicies + { + internal const int MaxSubjectTypeLength = 5; + internal const int MinSubjectTypeLength = 3; + internal const int MinSubjectDescriptorStringLength = 6; + internal const int MaxIdentifierLength = 256; + } + + internal static class SocialDescriptorPolicies + { + internal const int MaxSocialTypeLength = 4; + internal const int MinSocialTypeLength = SubjectDescriptorPolicies.MinSubjectTypeLength; + internal const int MinSocialDescriptorStringLength = SubjectDescriptorPolicies.MinSubjectDescriptorStringLength; + internal const int MaxIdentifierLength = SubjectDescriptorPolicies.MaxIdentifierLength; + } + + internal static class Version + { + internal const int Unspecified = -1; + } + + public const int MaximumRestResultSize = 500; + public const string JsonPatchMediaType = "application/json-patch+json"; + public const string JsonPatchOperationPathPrefix = "/"; + public const char SubjectListSeparator = ','; + public const char SubjectDescriptorPartsSeparator = '.'; + + // Social descriptor constants + public const char SocialListSeparator = ','; + public const char SocialDescriptorPartsSeparator = '.'; + public const string SocialDescriptorPrefix = "@"; + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/ChangedIdentities.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/ChangedIdentities.cs new file mode 100644 index 00000000000..23cf16f5d6a --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/ChangedIdentities.cs @@ -0,0 +1,119 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using Newtonsoft.Json; + +namespace GitHub.Services.Identity +{ + /// + /// Container class for changed identities + /// + [DataContract] + public class ChangedIdentities + { + [JsonConstructor] + private ChangedIdentities() + { + } + + public ChangedIdentities( + IList identities, + ChangedIdentitiesContext sequenceContext) : + this(identities, sequenceContext, false) + { + } + + public ChangedIdentities( + IList identities, + ChangedIdentitiesContext sequenceContext, + bool moreData) + { + Identities = identities; + SequenceContext = sequenceContext; + MoreData = moreData; + } + + /// + /// Changed Identities + /// + [DataMember] + public IList Identities { get; private set; } + + /// + /// Last Identity SequenceId + /// + [DataMember] + public ChangedIdentitiesContext SequenceContext { get; private set; } + + /// + /// More data available, set to true if pagesize is specified. 
+ /// + [DataMember] + public bool MoreData { get; private set; } + } + + /// + /// Context class for changed identities + /// + [DataContract] + public class ChangedIdentitiesContext + { + [JsonConstructor] + private ChangedIdentitiesContext() + { + } + + public ChangedIdentitiesContext( + Int32 identitySequenceId, + Int32 groupSequenceId) : + this(identitySequenceId, groupSequenceId, ChangedIdentitiesContext.UnspecifiedSequenceId) + { + } + + public ChangedIdentitiesContext( + Int32 identitySequenceId, + Int32 groupSequenceId, + Int32 organizationIdentitySequenceId) : + this(identitySequenceId, groupSequenceId, organizationIdentitySequenceId, 0) + { + } + + public ChangedIdentitiesContext( + Int32 identitySequenceId, + Int32 groupSequenceId, + Int32 organizationIdentitySequenceId, + Int32 pageSize) + { + IdentitySequenceId = identitySequenceId; + GroupSequenceId = groupSequenceId; + OrganizationIdentitySequenceId = organizationIdentitySequenceId; + PageSize = pageSize; + } + + /// + /// Last Identity SequenceId + /// + [DataMember] + public Int32 IdentitySequenceId { get; private set; } + + /// + /// Last Group SequenceId + /// + [DataMember] + public Int32 GroupSequenceId { get; private set; } + + /// + /// Last Group OrganizationIdentitySequenceId + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int32 OrganizationIdentitySequenceId { get; private set; } + + /// + /// Page size + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int32 PageSize { get; private set; } + + private static int UnspecifiedSequenceId = -1; + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/CreateGroupsInfo.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/CreateGroupsInfo.cs new file mode 100644 index 00000000000..36b8649f347 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/CreateGroupsInfo.cs @@ -0,0 +1,26 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Services.Identity +{ + [DataContract] + public class CreateGroupsInfo + { + public CreateGroupsInfo() + { + } + + public CreateGroupsInfo(Guid scopeId, IList groups) + { + this.ScopeId = scopeId; + this.Groups = new List(groups); + } + + [DataMember] + public Guid ScopeId { get; private set; } + + [DataMember] + public List Groups { get; private set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/CreateScopeInfo.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/CreateScopeInfo.cs new file mode 100644 index 00000000000..ff8b27875fd --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/CreateScopeInfo.cs @@ -0,0 +1,51 @@ +using GitHub.Services.Common; +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.Identity +{ + //Technically speaking, this is poor REST + //a PUT or POST to a service to create an entity should + //contain that entity, in this case an IdentityScope + //however this contains extra fields not in an IdentityScope + [DataContract] + public class CreateScopeInfo + { + public CreateScopeInfo() + { + } + + internal CreateScopeInfo(Guid parentScopeId, GroupScopeType scopeType, String scopeName, String adminGroupName, String adminGroupDescription, Guid creatorId) + { + ArgumentUtility.CheckStringForNullOrEmpty(scopeName, "scopeName"); + ArgumentUtility.CheckStringForNullOrEmpty(adminGroupName, "adminGroupName"); + ArgumentUtility.CheckStringForNullOrEmpty(adminGroupDescription, "admingGroupDescription"); + + ParentScopeId = parentScopeId; + ScopeType = scopeType; + ScopeName = 
scopeName; + AdminGroupName = adminGroupName; + AdminGroupDescription = adminGroupDescription; + CreatorId = creatorId; + } + + [DataMember] + public Guid ParentScopeId { get; private set; } + + [DataMember] + public GroupScopeType ScopeType { get; private set; } + + [DataMember] + public String ScopeName { get; private set; } + + [DataMember] + public String AdminGroupName { get; private set; } + + [DataMember] + public String AdminGroupDescription { get; private set; } + + [DataMember(IsRequired=false, EmitDefaultValue=false)] + public Guid CreatorId { get; private set; } + + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/FrameworkIdentityInfo.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/FrameworkIdentityInfo.cs new file mode 100644 index 00000000000..b40e904c1c3 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/FrameworkIdentityInfo.cs @@ -0,0 +1,20 @@ +using System.Runtime.Serialization; + +namespace GitHub.Services.Identity +{ + [DataContract] + public class FrameworkIdentityInfo + { + [DataMember] + public FrameworkIdentityType IdentityType { get; set; } + + [DataMember] + public string Role { get; set; } + + [DataMember] + public string Identifier { get; set; } + + [DataMember] + public string DisplayName { get; set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/FrameworkIdentityType.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/FrameworkIdentityType.cs new file mode 100644 index 00000000000..6e939ae6c65 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/FrameworkIdentityType.cs @@ -0,0 +1,10 @@ +namespace GitHub.Services.Identity +{ + public enum FrameworkIdentityType + { + None = 0, + ServiceIdentity = 1, + AggregateIdentity = 2, + ImportedIdentity = 3, + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/GroupMembership.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/GroupMembership.cs new file mode 100644 index 00000000000..311da6f166e --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/GroupMembership.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Globalization; +using System.Runtime.Serialization; +using System.Xml; + +namespace GitHub.Services.Identity +{ + /// + /// + /// + [DataContract] + public sealed class GroupMembership + { + public GroupMembership(Guid queriedId, Guid id, IdentityDescriptor descriptor) + { + QueriedId = queriedId; + Id = id; + Descriptor = descriptor; + Active = true; + } + + [DataMember] + public Guid QueriedId + { + get; + set; + } + + [DataMember] + public Guid Id + { + get + { + return m_id; + } + + set + { + m_id = value; + } + } + + [DataMember] + public IdentityDescriptor Descriptor + { + get; + set; + } + + [DataMember] + public Boolean Active + { + get; + set; + } + + + private Guid m_id; + + public GroupMembership Clone() + { + return new GroupMembership( + queriedId: QueriedId, + id: Id, + descriptor: Descriptor == null ? 
null : new IdentityDescriptor(Descriptor)) + { + Active = this.Active + }; + } + + public override string ToString() + { + return string.Format("[Id = {0}, Descriptor = {1}, Active = {2}, QueriedId = {3}]", Id, Descriptor, Active, QueriedId); + } + } + + [CollectionDataContract(Name = "GroupMemberships", ItemName = "GroupMembership")] + public class GroupMembershipCollection : List + { + public GroupMembershipCollection() + { + } + + public GroupMembershipCollection(IList source) + : base(source) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/IReadOnlyVssIdentity.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/IReadOnlyVssIdentity.cs new file mode 100644 index 00000000000..bda88d506a6 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/IReadOnlyVssIdentity.cs @@ -0,0 +1,23 @@ +using System; + +namespace GitHub.Services.Identity +{ + public interface IReadOnlyVssIdentity + { + Guid Id { get; } + + IdentityDescriptor Descriptor { get; } + + bool IsContainer { get; } + + bool IsExternalUser { get; } + + string DisplayName { get; } + + string ProviderDisplayName { get; } + + string CustomDisplayName { get; } + + TValue GetProperty(string name, TValue defaultValue); + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/IVssIdentity.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/IVssIdentity.cs new file mode 100644 index 00000000000..e8fb0fa52c1 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/IVssIdentity.cs @@ -0,0 +1,15 @@ +using System; + +namespace GitHub.Services.Identity +{ + public interface IVssIdentity : IReadOnlyVssIdentity + { + new IdentityDescriptor Descriptor { get; set; } + + new string ProviderDisplayName { get; set; } + + new string CustomDisplayName { get; set; } + + void SetProperty(string name, object value); + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/Identity.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/Identity.cs new file mode 100644 index 00000000000..874022c0b69 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/Identity.cs @@ -0,0 +1,557 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Runtime.Serialization; +using System.Xml; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.Identity +{ + //The only PATCH-able property on this class is CustomDisplayName, however there are more read/write properties + //because they get set by various providers in the Framework dll, in general Framework should not have internalsvisibleto to this dll + //CONSIDER: Should providers be in GitHub.Services.Identity instead? 
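    // Illustrative usage sketch (editorial addition, not part of this patch). It shows the read
    // pattern the IReadOnlyVssIdentity/IdentityBase members below are designed around:
    // DisplayName resolves CustomDisplayName before ProviderDisplayName, and arbitrary
    // properties are read through the typed accessor with an explicit default. The helper
    // name and its output format are assumptions for illustration only.
    internal static class IdentityUsageSketch
    {
        internal static string Describe(IReadOnlyVssIdentity identity)
        {
            // Falls back to ProviderDisplayName when no custom display name is set.
            string name = identity.DisplayName;

            // Same accessor the IsExternalUser/LocalScopeId helpers below rely on.
            string domain = identity.GetProperty(IdentityAttributeTags.Domain, string.Empty);

            return $"{name} [{identity.Descriptor}] domain='{domain}'";
        }
    }
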
+ [DataContract] + public sealed class Identity : IdentityBase, ISecuredObject + { + public Identity() : this(null) + { + } + + private Identity(PropertiesCollection properties) : base(properties) + { + } + + public Identity Clone(bool includeMemberships) + { + PropertiesCollection properties = new PropertiesCollection(Properties, validateExisting: false); + + Identity clone = new Identity(properties) + { + Id = Id, + Descriptor = new IdentityDescriptor(Descriptor), + SubjectDescriptor = SubjectDescriptor, + SocialDescriptor = SocialDescriptor, + ProviderDisplayName = ProviderDisplayName, + CustomDisplayName = CustomDisplayName, + IsActive = IsActive, + UniqueUserId = UniqueUserId, + IsContainer = IsContainer, + ResourceVersion = ResourceVersion, + MetaTypeId = MetaTypeId + }; + + if (includeMemberships) + { + clone.Members = CloneDescriptors(Members); + clone.MemberOf = CloneDescriptors(MemberOf); + clone.MemberIds = MemberIds?.ToList(); + clone.MemberOfIds = MemberOfIds?.ToList(); + } + + clone.MasterId = MasterId; + + return clone; + } + + public Identity Clone() + { + return this.Clone(true); + } + + internal static Identity FromXml(IServiceProvider serviceProvider, XmlReader reader) + { + Identity obj = new Identity(); + Debug.Assert(reader.NodeType == XmlNodeType.Element, "Expected a node."); + + bool empty = reader.IsEmptyElement; + + // Process the xml attributes + if (reader.HasAttributes) + { + while (reader.MoveToNextAttribute()) + { + switch (reader.Name) + { + case "DisplayName": + obj.ProviderDisplayName = reader.Value; + break; + case "IsActive": + obj.IsActive = XmlConvert.ToBoolean(reader.Value); + break; + case "IsContainer": + obj.IsContainer = XmlConvert.ToBoolean(reader.Value); + break; + case "TeamFoundationId": + obj.Id = XmlConvert.ToGuid(reader.Value); + break; + case "UniqueName": + // We don't have this property on VSIdentity + //obj.UniqueName = reader.Value; + break; + case "UniqueUserId": + obj.UniqueUserId = XmlConvert.ToInt32(reader.Value); + break; + default: + // Allow attributes such as xsi:type to fall through + break; + } + } + } + + // Process the fields in Xml elements + reader.Read(); + if (!empty) + { + while (reader.NodeType == XmlNodeType.Element) + { + switch (reader.Name) + { + case "Attributes": + KeyValueOfStringString[] attributes = XmlUtility.ArrayOfObjectFromXml(serviceProvider, reader, "KeyValueOfStringString", false, KeyValueOfStringString.FromXml); + if (attributes != null && obj.Properties != null) + { + foreach (KeyValueOfStringString attribute in attributes) + { + obj.Properties[attribute.Key] = attribute.Value; + } + } + break; + case "Descriptor": + obj.Descriptor = IdentityDescriptor.FromXml(serviceProvider, reader); + break; + case "LocalProperties": + // Since we're only using the SOAP serializer for bootstrap, we won't support properties + //obj.m_localPropertiesSet = Helper.ArrayOfPropertyValueFromXml(serviceProvider, reader, false); + reader.ReadOuterXml(); + break; + case "MemberOf": + obj.MemberOf = XmlUtility.ArrayOfObjectFromXml(serviceProvider, reader, "IdentityDescriptor", false, IdentityDescriptor.FromXml); + break; + case "Members": + obj.Members = XmlUtility.ArrayOfObjectFromXml(serviceProvider, reader, "IdentityDescriptor", false, IdentityDescriptor.FromXml); + break; + case "Properties": + // Since we're only using the SOAP serializer for bootstrap, we won't support properties + //obj.m_propertiesSet = Helper.ArrayOfPropertyValueFromXml(serviceProvider, reader, false); + reader.ReadOuterXml(); + break; + default: + 
// Make sure that we ignore XML node trees we do not understand + reader.ReadOuterXml(); + break; + } + } + reader.ReadEndElement(); + } + return obj; + } + + #region ISecuredObject + public Guid NamespaceId => GraphSecurityConstants.NamespaceId; + + public int RequiredPermissions => GraphSecurityConstants.ReadByPublicIdentifier; + + public string GetToken() => GraphSecurityConstants.SubjectsToken; + #endregion + + private static ICollection CloneDescriptors(IEnumerable descriptors) + { + return descriptors?.Select(item => new IdentityDescriptor(item)).ToList(); + } + } + + /// + /// Base Identity class to allow "trimmed" identity class in the GetConnectionData API + /// Makes sure that on-the-wire representations of the derived classes are compatible with each other + /// (e.g. Server responds with PublicIdentity object while client deserializes it as Identity object) + /// Derived classes should not have additional [DataMember] properties + /// + [DebuggerDisplay("Name: {DisplayName} ID:{Id}")] + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public abstract class IdentityBase : IVssIdentity + { + protected IdentityBase(PropertiesCollection properties) + { + if (properties == null) + { + Properties = new PropertiesCollection(); + } + else + { + Properties = properties; + } + + ResourceVersion = IdentityConstants.DefaultResourceVersion; + + // Initialize this as Unknown (255) so the default integer value of MetaTypeId isn't set as Member (0) + MetaType = IdentityMetaType.Unknown; + } + + [DataMember] + public Guid Id + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public IdentityDescriptor Descriptor + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public SubjectDescriptor SubjectDescriptor + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public SocialDescriptor SocialDescriptor + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + //***************************************************************************************************************** + /// + /// The display name for the identity as specified by the source identity provider. + /// + //***************************************************************************************************************** + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public string ProviderDisplayName + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + //***************************************************************************************************************** + /// + /// The custom display name for the identity (if any). Setting this property to an empty string will clear the existing + /// custom display name. Setting this property to null will not affect the existing persisted value + /// (since null values do not get sent over the wire or to the database) + /// + //***************************************************************************************************************** + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public string CustomDisplayName { get; set; } + + //***************************************************************************************************************** + /// + /// This is a computed property equal to the CustomDisplayName (if set) or the ProviderDisplayName. 
+ /// + //***************************************************************************************************************** + public string DisplayName + { + get + { + if (!string.IsNullOrEmpty(CustomDisplayName)) + { + return CustomDisplayName; + } + + return ProviderDisplayName; + } + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public bool IsActive { get; set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public int UniqueUserId + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public bool IsContainer + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public ICollection Members + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public ICollection MemberOf + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [EditorBrowsable(EditorBrowsableState.Never)] + public ICollection MemberIds { get; set; } + + [IgnoreDataMember] + [EditorBrowsable(EditorBrowsableState.Never)] + public ICollection MemberOfIds { get; set; } + + [EditorBrowsable(EditorBrowsableState.Never)] + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid MasterId { get; set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public PropertiesCollection Properties { get; private set; } + + [IgnoreDataMember] + [EditorBrowsable(EditorBrowsableState.Never)] + public bool ValidateProperties + { + get + { + return this.Properties.ValidateNewValues; + } + set + { + this.Properties.ValidateNewValues = value; + } + } + + [IgnoreDataMember] + [EditorBrowsable(EditorBrowsableState.Never)] + public bool IsExternalUser + { + get + { + Guid domain; + return Guid.TryParse(GetProperty(IdentityAttributeTags.Domain, string.Empty), out domain) && + Descriptor.IdentityType != IdentityConstants.ServiceIdentityType && + Descriptor.IdentityType != IdentityConstants.AggregateIdentityType && + Descriptor.IdentityType != IdentityConstants.ImportedIdentityType; + } + } + + /// + /// Get the Id of the containing scope + /// + [IgnoreDataMember] + public Guid LocalScopeId + { + get + { + return GetProperty(IdentityAttributeTags.LocalScopeId, default(Guid)); + } + } + + [IgnoreDataMember] + [EditorBrowsable(EditorBrowsableState.Never)] + public bool IsBindPending => + this.Descriptor != null && + IdentityConstants.BindPendingIdentityType.Equals(Descriptor.IdentityType); + + [IgnoreDataMember] + [EditorBrowsable(EditorBrowsableState.Never)] + public bool IsClaims => + this.Descriptor != null && + IdentityConstants.ClaimsType.Equals(Descriptor.IdentityType); + + [IgnoreDataMember] + [EditorBrowsable(EditorBrowsableState.Never)] + public bool IsImported => + this.Descriptor != null && + IdentityConstants.ImportedIdentityType.Equals(Descriptor.IdentityType); + + [IgnoreDataMember] + [EditorBrowsable(EditorBrowsableState.Never)] + public bool IsServiceIdentity => + this.Descriptor != null && + IdentityConstants.ServiceIdentityType.Equals(Descriptor.IdentityType); + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public int ResourceVersion { get; set; } + + [DataMember(IsRequired = false, EmitDefaultValue = true)] + public int MetaTypeId { get; set; } + + public IdentityMetaType MetaType + { + get { return 
(IdentityMetaType)MetaTypeId; } + set { MetaTypeId = (int)value; } + } + + [IgnoreDataMember] + [EditorBrowsable(EditorBrowsableState.Never)] + public bool IsCspPartnerUser => + this.Descriptor != null && + this.Descriptor.IsCspPartnerIdentityType(); + + //***************************************************************************************************************** + /// + /// Generic Property accessor. Returns default value of T if not found + /// + //***************************************************************************************************************** + public T GetProperty(string name, T defaultValue) + { + if (Properties != null && Properties.TryGetValidatedValue(name, out T value)) + { + return value; + } + else + { + return defaultValue; + } + } + + //***************************************************************************************************************** + /// + /// Property accessor. value will be null if not found. + /// + //***************************************************************************************************************** + public bool TryGetProperty(string name, out object value) + { + value = null; + return Properties != null && Properties.TryGetValue(name, out value); + } + + //***************************************************************************************************************** + /// + /// Internal function to initialize persisted property. + /// + //***************************************************************************************************************** + public void SetProperty(string name, object value) + { + m_hasModifiedProperties = true; + + //don't remove properties with null + //vals, just set them to null... + Properties[name] = value; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public bool HasModifiedProperties => m_hasModifiedProperties; + + [EditorBrowsable(EditorBrowsableState.Never)] + public void ResetModifiedProperties() + { + m_hasModifiedProperties = false; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public void SetAllModifiedProperties() + { + m_hasModifiedProperties = true; + } + + public override bool Equals(object obj) + { + IdentityBase other = obj as IdentityBase; + if (other != null) + { + return (Id == other.Id && + IdentityDescriptorComparer.Instance.Equals(Descriptor, other.Descriptor) && + string.Equals(ProviderDisplayName, other.ProviderDisplayName, StringComparison.OrdinalIgnoreCase) && + string.Equals(CustomDisplayName, other.CustomDisplayName, StringComparison.OrdinalIgnoreCase) && + IsActive == other.IsActive && + UniqueUserId == other.UniqueUserId && + IsContainer == other.IsContainer); + } + + return false; + } + + public override int GetHashCode() + { + if (Descriptor == null) + { + return 0; + } + return Descriptor.GetHashCode(); + } + + public override string ToString() + { + return string.Format(CultureInfo.InvariantCulture, "Identity {0} (IdentityType: {1}; Identifier: {2}; DisplayName: {3})", + Id, + (Descriptor == null) ? string.Empty : Descriptor.IdentityType, + (Descriptor == null) ? 
string.Empty : Descriptor.Identifier, + DisplayName); + } + + private bool m_hasModifiedProperties; + } + + internal class KeyValueOfStringString + { + public string Key { get; set; } + + public string Value { get; set; } + + internal static KeyValueOfStringString FromXml(IServiceProvider serviceProvider, XmlReader reader) + { + KeyValueOfStringString obj = new KeyValueOfStringString(); + Debug.Assert(reader.NodeType == XmlNodeType.Element, "Expected a node."); + + bool empty = reader.IsEmptyElement; + + // Process the xml attributes + if (reader.HasAttributes) + { + while (reader.MoveToNextAttribute()) + { + switch (reader.Name) + { + default: + // Allow attributes such as xsi:type to fall through + break; + } + } + } + + // Process the fields in Xml elements + reader.Read(); + if (!empty) + { + while (reader.NodeType == XmlNodeType.Element) + { + switch (reader.Name) + { + case "Key": + obj.Key = XmlUtility.StringFromXmlElement(reader); + break; + case "Value": + obj.Value = XmlUtility.StringFromXmlElement(reader); + break; + default: + // Make sure that we ignore XML node trees we do not understand + reader.ReadOuterXml(); + break; + } + } + reader.ReadEndElement(); + } + return obj; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityBatchInfo.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityBatchInfo.cs new file mode 100644 index 00000000000..99727ca8f70 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityBatchInfo.cs @@ -0,0 +1,76 @@ +using GitHub.Services.Common; +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Services.Identity +{ + [DataContract] + public class IdentityBatchInfo + { + private IdentityBatchInfo() + { + } + + public IdentityBatchInfo(IList subjectDescriptors, QueryMembership queryMembership = QueryMembership.None, IEnumerable propertyNames = null, bool includeRestrictedVisibility = false) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(subjectDescriptors, nameof(subjectDescriptors)); + + this.SubjectDescriptors = new List(subjectDescriptors); + this.QueryMembership = queryMembership; + this.PropertyNames = propertyNames; + this.IncludeRestrictedVisibility = includeRestrictedVisibility; + } + + public IdentityBatchInfo(IList descriptors, QueryMembership queryMembership = QueryMembership.None, IEnumerable propertyNames = null, bool includeRestrictedVisibility = false) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(descriptors, nameof(descriptors)); + + this.Descriptors = new List(descriptors); + this.QueryMembership = queryMembership; + this.PropertyNames = propertyNames; + this.IncludeRestrictedVisibility = includeRestrictedVisibility; + } + + public IdentityBatchInfo(IList identityIds, QueryMembership queryMembership = QueryMembership.None, IEnumerable propertyNames = null, bool includeRestrictedVisibility = false) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(identityIds, nameof(identityIds)); + + this.IdentityIds = new List(identityIds); + this.QueryMembership = queryMembership; + this.PropertyNames = propertyNames; + this.IncludeRestrictedVisibility = includeRestrictedVisibility; + } + + public IdentityBatchInfo(IList socialDescriptors, QueryMembership queryMembership = QueryMembership.None, IEnumerable propertyNames = null, bool includeRestrictedVisibility = false) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(socialDescriptors, nameof(socialDescriptors)); + + this.SocialDescriptors = new List(socialDescriptors); + this.QueryMembership = 
queryMembership; + this.PropertyNames = propertyNames; + this.IncludeRestrictedVisibility = includeRestrictedVisibility; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public List Descriptors { get; private set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public List SubjectDescriptors { get; private set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public List IdentityIds { get; private set; } + + [DataMember] + public QueryMembership QueryMembership { get; private set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public IEnumerable PropertyNames { get; private set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public bool IncludeRestrictedVisibility { get; private set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public List SocialDescriptors { get; private set; } + } +} \ No newline at end of file diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityCollections.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityCollections.cs new file mode 100644 index 00000000000..5bc30bcc890 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityCollections.cs @@ -0,0 +1,45 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Services.Identity +{ + [CollectionDataContract(Name = "Identities", ItemName = "Identity")] + public class IdentitiesCollection : List + { + public IdentitiesCollection() + { + } + + public IdentitiesCollection(IList source) + : base(source) + { + } + } + + [CollectionDataContract(Name = "Descriptors", ItemName = "Descriptor")] + public class IdentityDescriptorCollection : List + { + public IdentityDescriptorCollection() + { + } + + public IdentityDescriptorCollection(IList source) + : base(source) + { + } + } + + [CollectionDataContract(Name = "IdentityIds", ItemName = "IdentityId")] + public class IdentityIdCollection : List + { + public IdentityIdCollection() + { + } + + public IdentityIdCollection(IList source) + : base(source) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityDescriptor.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityDescriptor.cs new file mode 100644 index 00000000000..1cf584d1b19 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityDescriptor.cs @@ -0,0 +1,579 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Runtime.Serialization; +using System.Xml; +using System.Xml.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Services.Identity +{ + /// + /// An Identity descriptor is a wrapper for the identity type (Windows SID, Passport) + /// along with a unique identifier such as the SID or PUID. + /// + /// + /// This is the only legacy type moved into VSS (by necessity, it is used everywhere) + /// so it must support both Xml and DataContract serialization + /// + [XmlInclude(typeof(ReadOnlyIdentityDescriptor))] + [KnownType(typeof(ReadOnlyIdentityDescriptor))] + [TypeConverter(typeof(IdentityDescriptorConverter))] + [DataContract] + public class IdentityDescriptor : IEquatable, IComparable + { + /// + /// Default constructor, for Xml serializer only. 
+ /// + public IdentityDescriptor() { } + + /// + /// Constructor + /// + public IdentityDescriptor(string identityType, string identifier, object data) + : this(identityType, identifier) + { + this.Data = data; + } + + /// + /// Constructor + /// + public IdentityDescriptor(string identityType, string identifier) + { + //Validation in Setters... + IdentityType = identityType; + Identifier = identifier; + } + + /// + /// Copy Constructor + /// + public IdentityDescriptor(IdentityDescriptor clone) + { + IdentityType = clone.IdentityType; + Identifier = clone.Identifier; + } + + /// + /// Type of descriptor (for example, Windows, Passport, etc.). + /// + [XmlAttribute("identityType")] + [DataMember] + public virtual string IdentityType + { + get + { + return m_identityType ?? IdentityConstants.UnknownIdentityType; + } + set + { + ValidateIdentityType(value); + m_identityType = NormalizeIdentityType(value); + + // Drop any existing data + Data = null; + } + } + + /// + /// The unique identifier for this identity, not exceeding 256 chars, + /// which will be persisted. + /// + [XmlAttribute("identifier")] + [DataMember] + public virtual string Identifier + { + get + { + return m_identifier; + } + set + { + ValidateIdentifier(value); + m_identifier = value; + + // Drop any existing data + Data = null; + } + } + + /// + /// Any additional data specific to identity type. + /// + /// + /// Not serialized under either method. + /// + [XmlIgnore] + public virtual object Data { get; set; } + + public override string ToString() + { + return string.Concat(m_identityType, IdentityConstants.IdentityDescriptorPartsSeparator, m_identifier); + } + + public static IdentityDescriptor FromString(string identityDescriptorString) + { + if (string.IsNullOrEmpty(identityDescriptorString)) + { + return null; + } + + string[] tokens; + try + { + tokens = identityDescriptorString.Split(new[] { IdentityConstants.IdentityDescriptorPartsSeparator }, 2, StringSplitOptions.RemoveEmptyEntries); + } + catch + { + return new IdentityDescriptor(IdentityConstants.UnknownIdentityType, identityDescriptorString); + } + + if (tokens.Length == 2) + { + return new IdentityDescriptor(tokens[0], tokens[1]); + } + + return new IdentityDescriptor(IdentityConstants.UnknownIdentityType, identityDescriptorString); + } + + //Copied from TFCommonUtil.cs + private static void ValidateIdentityType(string identityType) + { + if (string.IsNullOrEmpty(identityType)) + { + throw new ArgumentNullException(nameof(identityType)); + } + + if (identityType.Length > MaxTypeLength) + { + throw new ArgumentOutOfRangeException(nameof(identityType)); + } + } + + private static String NormalizeIdentityType(String identityType) + { + String normalizedIdentityType; + + // Look up the string in the static dictionary. If we get a hit, then + // we'll use that string for the identity type instead. 
This saves memory + // as well as improves compare/equals performance when comparing descriptors, + // since Object.ReferenceEquals will return true a lot more often + if (!IdentityConstants.IdentityTypeMap.TryGetValue(identityType, out normalizedIdentityType)) + { + normalizedIdentityType = identityType; + } + + return normalizedIdentityType; + } + + private static void ValidateIdentifier(string identifier) + { + if (string.IsNullOrEmpty(identifier)) + { + throw new ArgumentNullException(nameof(identifier)); + } + + if (identifier.Length > MaxIdLength) + { + throw new ArgumentOutOfRangeException(nameof(identifier)); + } + } + + internal static IdentityDescriptor FromXml(IServiceProvider serviceProvider, XmlReader reader) + { + string identifier = string.Empty; + string identityType = string.Empty; + + Debug.Assert(reader.NodeType == XmlNodeType.Element, "Expected a node."); + + bool empty = reader.IsEmptyElement; + + // Process the xml attributes + if (reader.HasAttributes) + { + while (reader.MoveToNextAttribute()) + { + switch (reader.Name) + { + case "identifier": + identifier = reader.Value; + break; + case "identityType": + identityType = reader.Value; + break; + default: + // Allow attributes such as xsi:type to fall through + break; + } + } + } + + IdentityDescriptor obj = new IdentityDescriptor(identityType, identifier); + + // Process the fields in Xml elements + reader.Read(); + if (!empty) + { + while (reader.NodeType == XmlNodeType.Element) + { + switch (reader.Name) + { + default: + // Make sure that we ignore XML node trees we do not understand + reader.ReadOuterXml(); + break; + } + } + reader.ReadEndElement(); + } + + return obj; + } + + protected string m_identityType; + private string m_identifier; + + private const int MaxIdLength = 256; + private const int MaxTypeLength = 128; + + #region Equality and Compare + + // IEquatable + public bool Equals(IdentityDescriptor other) => IdentityDescriptorComparer.Instance.Equals(this, other); + + // IComparable + public int CompareTo(IdentityDescriptor other) => IdentityDescriptorComparer.Instance.Compare(this, other); + + public override bool Equals(object obj) => this.Equals(obj as IdentityDescriptor); + + public override int GetHashCode() => IdentityDescriptorComparer.Instance.GetHashCode(this); + + public static bool operator ==(IdentityDescriptor x, IdentityDescriptor y) + { + return IdentityDescriptorComparer.Instance.Equals(x, y); + } + + public static bool operator !=(IdentityDescriptor x, IdentityDescriptor y) + { + return !IdentityDescriptorComparer.Instance.Equals(x, y); + } + + #endregion // Equality and Compare + } + + /// + /// Class used for comparing IdentityDescriptors + /// + public class IdentityDescriptorComparer : IComparer, IEqualityComparer + { + private IdentityDescriptorComparer() + { + } + + /// + /// Compares two instances of IdentityDescriptor. + /// + /// The first IdentityDescriptor to compare. + /// The second IdentityDescriptor to compare. + /// Compares two specified IdentityDescriptor objects and returns an integer that indicates their relative position in the sort order. 
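        // Editorial sketch (not part of this patch): this comparer treats both the identity type
        // and the identifier as OrdinalIgnoreCase, and folds system service principals together
        // with claims identities while that migration is in flight. The Instance singleton makes
        // those semantics available wherever an explicit comparer is needed, e.g.
        //
        //     var byDescriptor = new Dictionary<IdentityDescriptor, Identity>(IdentityDescriptorComparer.Instance);
        //
        // which keeps keyed lookups consistent with IdentityDescriptor.Equals/GetHashCode below.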
+ public int Compare(IdentityDescriptor x, IdentityDescriptor y) + { + if (Object.ReferenceEquals(x, y)) + { + return 0; + } + + if (Object.ReferenceEquals(x, null) && !Object.ReferenceEquals(y, null)) + { + return -1; + } + + if (!Object.ReferenceEquals(x, null) && Object.ReferenceEquals(y, null)) + { + return 1; + } + + int retValue = StringComparer.OrdinalIgnoreCase.Compare(x.IdentityType, y.IdentityType); + + //have to maintain equivalence for service principals while we are migrating them + if (0 != retValue && + ((x.IsSystemServicePrincipalType() && y.IsClaimsIdentityType()) || + (y.IsSystemServicePrincipalType() && x.IsClaimsIdentityType()))) + { + retValue = 0; + } + + if (0 == retValue) + { + retValue = StringComparer.OrdinalIgnoreCase.Compare(x.Identifier, y.Identifier); + } + + return retValue; + } + + public bool Equals(IdentityDescriptor x, IdentityDescriptor y) + { + if (Object.ReferenceEquals(x, y)) + { + return true; + } + + return 0 == Compare(x, y); + } + + public int GetHashCode(IdentityDescriptor obj) + { + int hashCode = 7443; + string identityType = obj.IdentityType; + + //until all service principals are in the system store, we treat them as Claims identities for hash code + if(obj.IsSystemServicePrincipalType()) + { + identityType = IdentityConstants.ClaimsType; + } + + hashCode = 524287 * hashCode + StringComparer.OrdinalIgnoreCase.GetHashCode(identityType); + hashCode = 524287 * hashCode + StringComparer.OrdinalIgnoreCase.GetHashCode(obj.Identifier ?? string.Empty); + + return hashCode; + } + + public static IdentityDescriptorComparer Instance { get; } = new IdentityDescriptorComparer(); + } + + // Keep this in sync with the SubjectDescriptorExtensions to avoid extra casting/conversions + public static class IdentityDescriptorExtensions + { + public static bool IsTeamFoundationType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.TeamFoundationType); + } + + public static bool IsWindowsType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.WindowsType); + } + + public static bool IsUnknownIdentityType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.UnknownIdentityType); + } + + public static bool IsSystemServicePrincipalType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.System_ServicePrincipal); + } + + public static bool IsClaimsIdentityType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.ClaimsType); + } + + public static bool IsImportedIdentityType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.ImportedIdentityType); + } + + public static bool IsServiceIdentityType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.ServiceIdentityType); + } + + public static bool IsBindPendingType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.BindPendingIdentityType); + } + + public static bool IsAggregateIdentityType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.AggregateIdentityType); + } + + public static bool IsUnauthenticatedIdentity(this IdentityDescriptor identityDescriptor) + { + return 
identityDescriptor.IsIdentityType(IdentityConstants.UnauthenticatedIdentityType); + } + + public static bool IsSubjectStoreType(this IdentityDescriptor identityDescriptor) + { + return ReferenceEquals(identityDescriptor.IdentityType, IdentityConstants.System_License) + || ReferenceEquals(identityDescriptor.IdentityType, IdentityConstants.System_Scope) + || ReferenceEquals(identityDescriptor.IdentityType, IdentityConstants.System_ServicePrincipal) + || ReferenceEquals(identityDescriptor.IdentityType, IdentityConstants.System_WellKnownGroup) + || ReferenceEquals(identityDescriptor.IdentityType, IdentityConstants.System_CspPartner); + } + + /// + /// true if the descriptor matches any of the passed types + /// + /// + /// + /// + public static bool IsIdentityType(this IdentityDescriptor identityDescriptor, IEnumerable identityTypes) + { + return identityTypes.Any(id => StringComparer.OrdinalIgnoreCase.Equals(identityDescriptor.IdentityType, id)); + } + + public static bool IsIdentityType(this IdentityDescriptor identityDescriptor, string identityType) + { + return StringComparer.OrdinalIgnoreCase.Equals(identityDescriptor.IdentityType, identityType); + } + + public static bool IsCspPartnerIdentityType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.CspPartnerIdentityType); + } + + public static bool IsGroupScopeType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.GroupScopeType); + } + + public static bool IsSystemLicenseType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.System_License); + } + + public static bool IsSystemScopeType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.System_Scope); + } + + public static bool IsSystemPublicAccessType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.System_PublicAccess); + } + + public static bool IsSystemAccessControlType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.System_AccessControl); + } + + public static bool IsServerTestIdentityType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.ServerTestIdentity); + } + + public static bool IsSystemCspPartnerType(this IdentityDescriptor identityDescriptor) + { + return identityDescriptor.IsIdentityType(IdentityConstants.System_CspPartner); + } + } + + [DataContract] + public sealed class ReadOnlyIdentityDescriptor : IdentityDescriptor + { + /// + /// Default constructor, for Xml serializer only. 
+ /// + public ReadOnlyIdentityDescriptor() { } + + public ReadOnlyIdentityDescriptor(string identityType, string identifier, object data) + : base(identityType, identifier, data) + { + } + + [XmlAttribute("identityType")] + [DataMember] + public override string IdentityType + { + get + { + return base.IdentityType; + } + set + { + if (m_identityType != null) + { + throw new InvalidOperationException(IdentityResources.FieldReadOnly(nameof(IdentityType))); + } + base.IdentityType = value; + } + } + + [XmlAttribute("identifier")] + [DataMember] + public override string Identifier + { + get + { + return base.Identifier; + } + set + { + if (!string.IsNullOrEmpty(base.Identifier)) + { + throw new InvalidOperationException(IdentityResources.FieldReadOnly(nameof(Identifier))); + } + base.Identifier = value; + } + } + + [XmlIgnore] + public override object Data + { + get + { + return base.Data; + } + set + { + if (base.Data != null) + { + throw new InvalidOperationException(IdentityResources.FieldReadOnly(nameof(Data))); + } + base.Data = value; + } + } + } + + /// + /// Converter to support data contract serialization. + /// + public class IdentityDescriptorConverter : TypeConverter + { + public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType) + { + return sourceType.Equals(typeof(string)) || base.CanConvertFrom(context, sourceType); + } + + public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType) + { + return destinationType.Equals(typeof(string)) || base.CanConvertTo(context, destinationType); + } + + public override object ConvertFrom(ITypeDescriptorContext context, CultureInfo culture, object value) + { + if (value is string) + { + string descriptor = value as string; + string[] tokens = descriptor.Split(new[] { IdentityConstants.IdentityDescriptorPartsSeparator }, 2, StringSplitOptions.RemoveEmptyEntries); + + if (tokens.Length == 2) + { + return new IdentityDescriptor(tokens[0], tokens[1]); + } + } + + return base.ConvertFrom(context, culture, value); + } + + public override object ConvertTo(ITypeDescriptorContext context, CultureInfo culture, object value, Type destinationType) + { + if (destinationType.Equals(typeof(string))) + { + IdentityDescriptor descriptor = value as IdentityDescriptor; + + return descriptor?.ToString() ?? string.Empty; + } + + return base.ConvertTo(context, culture, value, destinationType); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityEnumerations.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityEnumerations.cs new file mode 100644 index 00000000000..5947e4ba514 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityEnumerations.cs @@ -0,0 +1,196 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
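// Editorial note on IdentityDescriptor.cs above (illustrative sketch, not part of this patch):
// ToString()/FromString() round-trip the "identityType + separator + identifier" form, where the
// separator comes from IdentityConstants.IdentityDescriptorPartsSeparator:
//
//     var copy = IdentityDescriptor.FromString(original.ToString());
//     // copy compares equal to the original under IdentityDescriptorComparer; a string that
//     // contains no separator comes back typed as IdentityConstants.UnknownIdentityType.
//
// IdentityDescriptorConverter above exposes the same string form for data contract serialization.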
+using System; +using System.Runtime.Serialization; +using System.Xml.Serialization; + +namespace GitHub.Services.Identity +{ + [DataContract] + public enum GroupScopeType + { + [EnumMember, XmlEnum("0")] + Generic = 0, + + [EnumMember, XmlEnum("1")] + ServiceHost = 1, + + [EnumMember, XmlEnum("2")] + TeamProject = 2 + } + + + // This is the *same* as IdentitySearchFactor in TFCommon + // Changed the name to avoid conflicts + [DataContract] + public enum IdentitySearchFilter + { + /// + /// NT account name (domain\alias) + /// + [EnumMember] + AccountName = 0, + + /// + /// Display name + /// + [EnumMember] + DisplayName = 1, + + /// + /// Find project admin group + /// + [EnumMember] + AdministratorsGroup = 2, + + /// + /// Find the identity using the identifier + /// + [EnumMember] + Identifier = 3, + + /// + /// Email address + /// + [EnumMember] + MailAddress = 4, + + /// + /// A general search for an identity. + /// + /// + /// This is the default search factor for shorter overloads of ReadIdentity, and typically the correct choice for user input. + /// + /// Use the general search factor to find one or more identities by one of the following properties: + /// * Display name + /// * account name + /// * UniqueName + /// + /// UniqueName may be easier to type than display name. It can also be used to indicate a single identity when two or more identities share the same display name (e.g. "John Smith") + /// + [EnumMember] + General = 5, + + /// + /// Alternate login username + /// + [EnumMember] + Alias = 6, + + /// + /// Find identity using Domain/TenantId + /// + [EnumMember] + [Obsolete("Use read identities to get member of collection valid users group instead.")] + Domain = 7, + + /// + /// Find identity using DirectoryAlias + /// + [EnumMember] + DirectoryAlias = 8, + + /// + /// Find a team group by its name + /// + [Obsolete("Deprecating TeamGroupName, use LocalGroupName instead and filter out non teams groups from the result")] + [EnumMember] + TeamGroupName = 9, + + /// + /// Find a local group (i.e. VSTS or TFS rather than AAD or AD group) by its name + /// + [EnumMember] + LocalGroupName = 10, + } + + // This enum is as an index for IMS identity caches. + // This is the *same* as MembershipQuery in TFCommon + // Changed the name to avoid conflicts + [DataContract] + public enum QueryMembership + { + // These enumeration values should run from zero to N, with no gaps. + // IdentityHostCache uses these values as indexes. + + /// + /// Query will not return any membership data + /// + [EnumMember] + None = 0, + + /// + /// Query will return only direct membership data + /// + [EnumMember] + Direct = 1, + + /// + /// Query will return expanded membership data + /// + [EnumMember] + Expanded = 2, + + /// + /// Query will return expanded up membership data (parents only) + /// + [EnumMember] + ExpandedUp = 3, + + /// + /// Query will return expanded down membership data (children only) + /// + [EnumMember] + ExpandedDown = 4 + + // Dev10 had the public value "Last = 3", as an indicator of the end of the enumeration. + // Dev14 supports public enum value "ExpandedDown = 4" , as an indicator of the end of the enumeration. + } + + // Designates "special" VSS groups. 
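    // Editorial sketch (not part of this patch): QueryMembership above is the knob that controls
    // how much membership data a batched identity read materializes. Assuming the IdentityBatchInfo
    // contract introduced earlier in this change, a direct-members-only read would look roughly like:
    //
    //     var batch = new IdentityBatchInfo(identityIds, QueryMembership.Direct);
    //
    // while QueryMembership.None (the constructor default) skips membership expansion entirely.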
+ [DataContract] + public enum SpecialGroupType + { + [EnumMember] + Generic = 0, + + [EnumMember] + AdministrativeApplicationGroup, + + [EnumMember] + ServiceApplicationGroup, + + [EnumMember] + EveryoneApplicationGroup, + + [EnumMember] + LicenseesApplicationGroup, + + [EnumMember] + AzureActiveDirectoryApplicationGroup, + + [EnumMember] + AzureActiveDirectoryRole, + } + + [Flags] + public enum ReadIdentitiesOptions + { + None = 0, + FilterIllegalMemberships = 1 + } + + public enum RestoreProjectOptions + { + /// + /// Brings back all memberships whose members are not owned by the scope + /// + All = 0, + + /// + /// Brings back some memberships whose members are not owned by the scope. + /// The membership will be a subset of All with the additional requirement + /// that the members have visibility into the project collection scope. + /// + Visible = 1, + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityMetaType.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityMetaType.cs new file mode 100644 index 00000000000..3ffcf165d4e --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityMetaType.cs @@ -0,0 +1,12 @@ + +namespace GitHub.Services.Identity +{ + public enum IdentityMetaType + { + Member = 0, + Guest = 1, + CompanyAdministrator = 2, + HelpdeskAdministrator = 3, + Unknown = 255, // When the type isn't known (default value) + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityScope.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityScope.cs new file mode 100644 index 00000000000..2f9db5f93b4 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityScope.cs @@ -0,0 +1,146 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using GitHub.Services.Common; + +namespace GitHub.Services.Identity +{ + [DataContract] + public class IdentityScope + { + internal IdentityScope() + { + } + + //Copy-Constructor + internal IdentityScope(IdentityScope other) + : this(other.Id, other.Name) + { + Administrators = other.Administrators == null ?
null : new IdentityDescriptor(other.Administrators); + IsActive = other.IsActive; + IsGlobal = other.IsGlobal; + LocalScopeId = other.LocalScopeId; + ParentId = other.ParentId; + ScopeType = other.ScopeType; + SecuringHostId = other.SecuringHostId; + } + + //Constructor used for the rename operation + internal IdentityScope(Guid id, String name) + { + ArgumentUtility.CheckForEmptyGuid(id, "id"); + ArgumentUtility.CheckStringForNullOrEmpty(name, "name"); + this.Id = id; + this.Name = name; + } + + [DataMember(IsRequired=true)] + public Guid Id + { + get; + + internal set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + internal Guid LocalScopeId + { + get; + + set; + } + + [DataMember(IsRequired=false, EmitDefaultValue=false)] + public Guid ParentId + { + get; + + internal set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public GroupScopeType ScopeType + { + get; + + internal set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public IdentityDescriptor Administrators + { + get; + + internal set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Boolean IsGlobal + { + get; + internal set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid SecuringHostId + { + get; + + internal set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Boolean IsActive + { + get; + + internal set; + } + + public IdentityScope Clone() + { + return new IdentityScope(this); + } + + public override string ToString() + { + return $"[Id={Id}, Name={Name}, LocalScopeId={LocalScopeId}, ParentId={ParentId}, ScopeType={ScopeType}, SecuringHostId={SecuringHostId}, Administrators={Administrators}, IsActive={IsActive}, IsGlobal={IsGlobal}]"; + } + + private SubjectDescriptor subjectDescriptor; + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public SubjectDescriptor SubjectDescriptor + { + get + { + if (subjectDescriptor == default(SubjectDescriptor)) + { + subjectDescriptor = new SubjectDescriptor(Graph.Constants.SubjectType.GroupScopeType, Id.ToString()); + } + + return subjectDescriptor; + } + } + } + + [CollectionDataContract(Name = "Scopes", ItemName = "Scope")] + public class IdentityScopeCollection : List + { + public IdentityScopeCollection() + { + } + + public IdentityScopeCollection(IList source) + : base(source) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentitySelf.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentitySelf.cs new file mode 100644 index 00000000000..9c8c9df2e1a --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentitySelf.cs @@ -0,0 +1,105 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Runtime.Serialization; + +namespace GitHub.Services.Identity +{ + /// + /// Identity information. + /// + [DebuggerDisplay("{DisplayName}")] + [DataContract] + public class IdentitySelf + { + /// + /// This is the VSID of the home tenant profile. If the profile is signed into the home tenant or if the profile + /// has no tenants then this Id is the same as the Id returned by the profile/profiles/me endpoint. Going forward + /// it is recommended that you use the combined values of Origin, OriginId and Domain to uniquely identify a user + /// rather than this Id. 
+ /// + [DataMember] + public Guid Id + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The display name. For AAD accounts with multiple tenants this is the display name of the profile in the home tenant. + /// + [DataMember] + public String DisplayName + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The UserPrincipalName (UPN) of the account. This value comes from the source provider. + /// + [DataMember] + public string AccountName + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The type of source provider for the origin identifier. + /// For MSA accounts this is "msa". For AAD accounts this is "aad". + /// + [DataMember] + public string Origin + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The unique identifier from the system of origin. If there are multiple tenants this is the + /// unique identifier of the account in the home tenant. + /// (For MSA this is the PUID in hex notation, for AAD this is the object id.) + /// + [DataMember] + public string OriginId + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// This represents the name of the container of origin. + /// For AAD accounts this is the tenantID of the home tenant. + /// For MSA accounts this is the string "Windows Live ID". + /// + [DataMember] + public string Domain + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// For AAD accounts this is all of the tenants that this account is a member of. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public IEnumerable Tenants + { + get; set; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentitySnapshot.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentitySnapshot.cs new file mode 100644 index 00000000000..93954193595 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentitySnapshot.cs @@ -0,0 +1,81 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; + +namespace GitHub.Services.Identity +{ + [DataContract] + public class IdentitySnapshot + { + public IdentitySnapshot() + { + } + + public IdentitySnapshot(Guid scopeId) + { + ScopeId = scopeId; + Scopes = new List(); + Groups = new List(); + Memberships = new List(); + IdentityIds = new List(); + } + + [DataMember] + public Guid ScopeId + { + get; + set; + } + + [DataMember] + public List Scopes + { + get; + set; + } + + [DataMember] + public List Groups + { + get; + set; + } + + [DataMember] + public List Memberships + { + get; + set; + } + + [DataMember] + public List IdentityIds + { + get; + set; + } + + public IdentitySnapshot Clone() + { + return new IdentitySnapshot() + { + ScopeId = this.ScopeId, + Scopes = this.Scopes?.Where(x => x != null).Select(x => x.Clone()).ToList(), + Groups = this.Groups?.Where(x => x != null).Select(x => x.Clone()).ToList(), + Memberships = this.Memberships?.Where(x => x != null).Select(x => x.Clone()).ToList(), + IdentityIds = this.IdentityIds.ToList(), + }; + } + + public override string ToString() + { + return string.Format("[ScopeId = {0}, Scopes={1}, Groups={2}, Memberships={3}, Identities={4}]", + ScopeId, + Scopes?.Count, + Groups?.Count, + Memberships?.Count, + IdentityIds?.Count); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityUpdateData.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityUpdateData.cs new file mode 100644 index 
00000000000..e0084a566fe --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/IdentityUpdateData.cs @@ -0,0 +1,18 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.Identity +{ + [DataContract] + public class IdentityUpdateData + { + [DataMember] + public Int32 Index { get; set; } + + [DataMember] + public Guid Id { get; set; } + + [DataMember] + public Boolean Updated { get; set; } + } +} \ No newline at end of file diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/RequestHeadersContext.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/RequestHeadersContext.cs new file mode 100644 index 00000000000..e473bfe1210 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/RequestHeadersContext.cs @@ -0,0 +1,64 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http.Headers; + +namespace GitHub.Services.Identity +{ + public class RequestHeadersContext + { + internal SequenceContext SequenceContext { get; set; } + + internal bool IgnoreCache { get; set; } + + public RequestHeadersContext(SequenceContext sequenceContext) : + this(sequenceContext, false) + { + } + + public RequestHeadersContext(SequenceContext sequenceContext, bool ignoreCache) + { + SequenceContext = sequenceContext; + IgnoreCache = ignoreCache; + } + + private static bool ParseOrGetDefault(string s) + { + if (!string.IsNullOrWhiteSpace(s) && bool.TryParse(s, out var value)) + { + return value; + } + return false; + } + + internal class HeadersUtils + { + public static KeyValuePair[] PopulateRequestHeaders(RequestHeadersContext requestHeaderContext) + { + if (requestHeaderContext == null) + { + return new KeyValuePair[0]; + } + + KeyValuePair[] sequenceContextHeaders = SequenceContext.HeadersUtils.PopulateRequestHeaders(requestHeaderContext.SequenceContext); + KeyValuePair[] resultHeaderPairs = new KeyValuePair[sequenceContextHeaders.Length + 1]; + sequenceContextHeaders.CopyTo(resultHeaderPairs, 0); + resultHeaderPairs[sequenceContextHeaders.Length] = new KeyValuePair(c_ignoreCacheHeader, requestHeaderContext.IgnoreCache.ToString()); + return resultHeaderPairs; + } + + public static bool TryExtractRequestHeaderContext(HttpRequestHeaders httpRequestHeaders, out RequestHeadersContext requestHeadersContext) + { + requestHeadersContext = null; + bool hasIgnoreCacheHeader = httpRequestHeaders.TryGetValues(c_ignoreCacheHeader, out IEnumerable ignoreCacheValue) && ignoreCacheValue != null; + bool hasSequenceContextHeader = SequenceContext.HeadersUtils.TryExtractSequenceContext(httpRequestHeaders, out SequenceContext sequenceContext); + bool ignoreCache = ParseOrGetDefault(ignoreCacheValue?.FirstOrDefault()); + requestHeadersContext = new RequestHeadersContext(sequenceContext, ignoreCache); + return hasIgnoreCacheHeader || hasSequenceContextHeader; + } + + private const string c_ignoreCacheHeader = "X-VSSF-IMS-IgnoreCache"; + } + + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/SequenceContext.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/SequenceContext.cs new file mode 100644 index 00000000000..da935a228fb --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/SequenceContext.cs @@ -0,0 +1,102 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http.Headers; + +namespace GitHub.Services.Identity +{ + public class SequenceContext + { + public SequenceContext(long identitySequenceId, long groupSequenceId) : + this(identitySequenceId, groupSequenceId, SequenceContext.UnspecifiedSequenceId) + { + } 
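As a reading aid (not part of the patch), the sketch below shows how the RequestHeadersContext defined above is turned into outgoing HTTP header pairs. It assumes the stripped generic parameters are KeyValuePair<string, string>, and because HeadersUtils is an internal nested class, code like this would only compile inside the SDK assembly.

    using System.Collections.Generic;
    using GitHub.Services.Identity;

    static class RequestHeadersContextSample
    {
        static void Demo()
        {
            // Ask the identity service for changes newer than these sequence ids
            // and request that any server-side cache be bypassed.
            var context = new RequestHeadersContext(
                new SequenceContext(identitySequenceId: 100, groupSequenceId: 200),
                ignoreCache: true);

            // Produces the X-VSSF-* sequence headers plus "X-VSSF-IMS-IgnoreCache: True".
            KeyValuePair<string, string>[] headers =
                RequestHeadersContext.HeadersUtils.PopulateRequestHeaders(context);
        }
    }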
+ + public SequenceContext(long identitySequenceId, long groupSequenceId, long organizationIdentitySequenceId) : + this(identitySequenceId, groupSequenceId, organizationIdentitySequenceId, 0) + { + } + + public SequenceContext(long identitySequenceId, long groupSequenceId, long organizationIdentitySequenceId, long pageSize) + { + IdentitySequenceId = identitySequenceId; + GroupSequenceId = groupSequenceId; + OrganizationIdentitySequenceId = organizationIdentitySequenceId; + PageSize = pageSize; + } + + internal long IdentitySequenceId { get; } + + internal long GroupSequenceId { get; } + + internal long OrganizationIdentitySequenceId { get; } + + internal long PageSize { get; } + + internal SequenceContext Clone() + { + return new SequenceContext(IdentitySequenceId, GroupSequenceId, OrganizationIdentitySequenceId); + } + + public override string ToString() => $"[{nameof(IdentitySequenceId)}:{IdentitySequenceId}, {nameof(GroupSequenceId)}:{GroupSequenceId}, {nameof(OrganizationIdentitySequenceId)}:{OrganizationIdentitySequenceId}]"; + + internal const long UnspecifiedSequenceId = -1; + + internal static SequenceContext MaxSequenceContext = new SequenceContext(long.MaxValue, long.MaxValue, long.MaxValue, 0); + internal static SequenceContext InitSequenceContext = new SequenceContext(UnspecifiedSequenceId, UnspecifiedSequenceId, UnspecifiedSequenceId, 0); + + internal class HeadersUtils + { + internal const string MinIdentitySequenceId = "X-VSSF-MinIdentitySequenceId"; + internal const string MinGroupSequenceId = "X-VSSF-MinGroupSequenceId"; + internal const string MinOrgIdentitySequenceId = "X-VSSF-MinOrgIdentitySequenceId"; + internal const string PageSize = "X-VSSF-PagingSize"; + + internal static bool TryExtractSequenceContext(HttpRequestHeaders httpRequestHeaders, out SequenceContext sequenceContext) + { + sequenceContext = null; + bool hasMinIdentitySequenceHeader = httpRequestHeaders.TryGetValues(MinIdentitySequenceId, out var minIdentitySequenceIdValues) && minIdentitySequenceIdValues != null; + bool hasMinGroupSequenceHeader = httpRequestHeaders.TryGetValues(MinGroupSequenceId, out var minGroupSequenceIdValues) && minGroupSequenceIdValues != null; + bool hasMinOrgIdentitySequenceHeader = httpRequestHeaders.TryGetValues(MinOrgIdentitySequenceId, out var minOrgIdentitySequenceIdValues) && minOrgIdentitySequenceIdValues != null; + bool hasPageSizeHeader = httpRequestHeaders.TryGetValues(PageSize, out var pageSizeValues) && pageSizeValues != null; + + if (!hasMinGroupSequenceHeader && !hasMinIdentitySequenceHeader && !hasMinOrgIdentitySequenceHeader) + { + return false; + } + + long minIdentitySequenceId = ParseOrGetDefault(minIdentitySequenceIdValues?.FirstOrDefault()); + long minGroupSequenceId = ParseOrGetDefault(minGroupSequenceIdValues?.FirstOrDefault()); + long minOrgIdentitySequenceId = ParseOrGetDefault(minOrgIdentitySequenceIdValues?.FirstOrDefault()); + long pageSize = ParseOrGetDefault(pageSizeValues?.FirstOrDefault()); + sequenceContext = new SequenceContext(minIdentitySequenceId, minGroupSequenceId, minOrgIdentitySequenceId, pageSize); + return true; + } + + internal static KeyValuePair[] PopulateRequestHeaders(SequenceContext sequenceContext) + { + if (sequenceContext == null) + { + return new KeyValuePair[0]; + } + + return new[] + { + new KeyValuePair(MinIdentitySequenceId, sequenceContext.IdentitySequenceId.ToString()), + new KeyValuePair(MinGroupSequenceId, sequenceContext.GroupSequenceId.ToString()), + new KeyValuePair(MinOrgIdentitySequenceId, 
sequenceContext.OrganizationIdentitySequenceId.ToString()), + new KeyValuePair(PageSize, sequenceContext.PageSize.ToString()) + }; + } + + private static long ParseOrGetDefault(string s) + { + if (!string.IsNullOrWhiteSpace(s) && long.TryParse(s, out long value)) + { + return value; + } + return UnspecifiedSequenceId; + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/SwapIdentityInfo.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/SwapIdentityInfo.cs new file mode 100644 index 00000000000..25b7652cbeb --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/SwapIdentityInfo.cs @@ -0,0 +1,25 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.Identity +{ + [DataContract] + public class SwapIdentityInfo + { + public SwapIdentityInfo() + { + } + + public SwapIdentityInfo(Guid id1, Guid id2) + { + this.Id1 = id1; + this.Id2 = id2; + } + + [DataMember] + public Guid Id1 { get; private set; } + + [DataMember] + public Guid Id2 { get; private set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Identity/TenantInfo.cs b/src/Sdk/WebApi/WebApi/Contracts/Identity/TenantInfo.cs new file mode 100644 index 00000000000..482d12254b3 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Identity/TenantInfo.cs @@ -0,0 +1,24 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Runtime.Serialization; + +namespace GitHub.Services.Identity +{ + [DebuggerDisplay("{TenantName}")] + [DataContract] + public class TenantInfo + { + [DataMember] + public Guid TenantId { get; set; } + + [DataMember] + public string TenantName { get; set; } + + [DataMember] + public bool HomeTenant { get; set; } + + [DataMember] + public IEnumerable VerifiedDomains { get; set; } + } +} \ No newline at end of file diff --git a/src/Sdk/WebApi/WebApi/Contracts/Licensing/AccountLicense.cs b/src/Sdk/WebApi/WebApi/Contracts/Licensing/AccountLicense.cs new file mode 100644 index 00000000000..7a113cbc6ca --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Licensing/AccountLicense.cs @@ -0,0 +1,134 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace GitHub.Services.Licensing +{ + /// + /// Represents an Account license + /// + public sealed class AccountLicense : License, IComparable + { + /// + /// An Early Adopter License + /// + public static readonly AccountLicense EarlyAdopter = new AccountLicense(AccountLicenseType.EarlyAdopter); + + /// + /// A Stakeholder License + /// + public static readonly AccountLicense Stakeholder = new AccountLicense(AccountLicenseType.Stakeholder); + + /// + /// An Express License + /// + public static readonly AccountLicense Express = new AccountLicense(AccountLicenseType.Express); + + /// + /// A Professional License + /// + public static readonly AccountLicense Professional = new AccountLicense(AccountLicenseType.Professional); + + /// + /// An Advanced License + /// + public static readonly AccountLicense Advanced = new AccountLicense(AccountLicenseType.Advanced); + + /// + /// Initializes an instance of the class + /// + /// The type of license + private AccountLicense(AccountLicenseType license) + : base(LicensingSource.Account, typeof(AccountLicenseType), (int)license) + { + } + + /// + /// Gets the license type for the license + /// + public AccountLicenseType License + { + get { return (AccountLicenseType)this.GetLicenseAsInt32(); } + } + + /// + /// Compares the current object with another 
object of the same type. + /// + /// A value that indicates the relative order of the objects being compared. The return value has the following meanings: Value Meaning Less than zero This object is less than the parameter.Zero This object is equal to . Greater than zero This object is greater than . + /// An object to compare with this object. + public int CompareTo(AccountLicense other) + { + return Compare(this, other); + } + + /// + /// Compares two objects of the same type. + /// + /// A value that indicates the relative order of the objects being compared. + /// The left-hand operand to compare + /// The right-hand operand to compare + public static int Compare(AccountLicense left, AccountLicense right) + { + if (object.ReferenceEquals(left, null)) + { + if (object.ReferenceEquals(right, null)) + { + return 0; + } + return -1; + } + else if (object.ReferenceEquals(right, null)) + { + return +1; + } + + return LicenseComparer.Instance.Compare(left, right); + } + + /// + /// Compares two objects of the same type. + /// + /// True if the left-hand value is greater than the right-hand value; otherwise, false + /// The left-hand operand to compare + /// The right-hand operand to compare + public static bool operator >(AccountLicense left, AccountLicense right) + { + return Compare(left, right) > 0; + } + + /// + /// Compares two objects of the same type. + /// + /// True if the left-hand value is less than the right-hand value; otherwise, false + /// The left-hand operand to compare + /// The right-hand operand to compare + public static bool operator <(AccountLicense left, AccountLicense right) + { + return Compare(left, right) < 0; + } + + /// + /// Gets a instance for the provided license type + /// + /// The type of license + /// A license for the provided license type + /// license was not in the list of allowed values + public static License GetLicense(AccountLicenseType license) + { + switch (license) + { + case AccountLicenseType.None: return None; + case AccountLicenseType.EarlyAdopter: return EarlyAdopter; + case AccountLicenseType.Stakeholder: return Stakeholder; + case AccountLicenseType.Express: return Express; + case AccountLicenseType.Professional: return Professional; + case AccountLicenseType.Advanced: return Advanced; + default: + throw new InvalidEnumArgumentException("license", (int)license, typeof(AccountLicenseType)); + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Licensing/Definitions.cs b/src/Sdk/WebApi/WebApi/Contracts/Licensing/Definitions.cs new file mode 100644 index 00000000000..ac75c659c75 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Licensing/Definitions.cs @@ -0,0 +1,96 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Services.Licensing +{ + [DataContract] + public enum AssignmentSource + { + None = 0, + Unknown = 1, + GroupRule = 2 + } + + [DataContract] + public enum LicensingOrigin + { + None = 0, + OnDemandPrivateProject = 1, + OnDemandPublicProject = 2, + UserHubInvitation = 3, + PrivateProjectInvitation = 4, + PublicProjectInvitation = 5, + } + + [DataContract] + public enum LicensingSource + { + None = 0, + Account = 1, + Msdn = 2, + Profile = 3, + Auto = 4, + Trial = 5 + } + + [DataContract] + [ClientIncludeModel] + public enum MsdnLicenseType + { + None = 0, + Eligible = 1, + Professional = 2, + Platforms = 3, + TestProfessional = 4, + Premium = 5, + Ultimate = 6, + Enterprise = 7, + } + + [DataContract] + [ClientIncludeModel] + public enum AccountLicenseType + { + None = 
0, + EarlyAdopter = 1, + Express = 2, + Professional = 3, + Advanced = 4, + Stakeholder = 5, + } + + [DataContract] + public enum VisualStudioOnlineServiceLevel + { + /// + /// No service rights. The user cannot access the account + /// + [EnumMember] + None = 0, + + /// + /// Default or minimum service level + /// + [EnumMember] + Express = 1, + + /// + /// Premium service level - either by purchasing on the Azure portal or by purchasing the appropriate MSDN subscription + /// + [EnumMember] + Advanced = 2, + + /// + /// Only available to a specific set of MSDN Subscribers + /// + [EnumMember] + AdvancedPlus = 3, + + /// + /// Stakeholder service level + /// + [EnumMember] + Stakeholder = 4, + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Licensing/License.cs b/src/Sdk/WebApi/WebApi/Contracts/Licensing/License.cs new file mode 100644 index 00000000000..fc0e0987f2e --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Licensing/License.cs @@ -0,0 +1,403 @@ +using System; +using System.ComponentModel; +using System.Diagnostics; +using System.Text; +using Newtonsoft.Json; + +namespace GitHub.Services.Licensing +{ + /// + /// The base class for a specific license source and license + /// + [JsonConverter(typeof(LicenseJsonConverter))] + [TypeConverter(typeof(LicenseTypeConverter))] + [JsonObject] + [DebuggerDisplay("{ToString(), nq}")] + public abstract class License : IEquatable + { + /// + /// Represents a non-existent license + /// + public static readonly License None = new NoLicense(); + + /// + /// Represents a license that is auto assigned at user sign-in (e.g. from msdn licenses) + /// + public static readonly License Auto = new AutoLicense(); + + private Type licenseEnumType; + private int license; + + /// + /// Initializes a new instance of the License type + /// + /// The source of the license + /// The type for the license enum + /// The value for the license + internal License(LicensingSource source, Type licenseEnumType, int license) + { + this.licenseEnumType = licenseEnumType; + this.license = license; + this.Source = source; + } + + /// + /// Gets the source of the license + /// + public LicensingSource Source { get; private set; } + + /// + /// Gets the internal value for the license + /// + internal int GetLicenseAsInt32() + { + return this.license; + } + + /// + /// Serves as a hash function for a particular type. + /// + /// A hash code for the current . + public override int GetHashCode() + { + return this.Source.GetHashCode() + ^ this.license.GetHashCode(); + } + + /// + /// Determines whether the specified System.Object is equal to the current . + /// + /// The object to compare with the current object. + /// true if the specified object is equal to the current object; otherwise, false. + public override bool Equals(object obj) + { + return this.Equals(obj as License); + } + + /// + /// Determines whether the specified is equal to the current . + /// + /// The object to compare with the current object. + /// true if the specified object is equal to the current object; otherwise, false. + public bool Equals(License obj) + { + return obj != null + && this.Source == obj.Source + && this.license == obj.license; + } + + /// + /// Returns a string that represents the current object. + /// + /// A string that represents the current object. 
+ public override string ToString() + { + var sb = new StringBuilder(); + sb.Append(this.Source.ToString()); + sb.Append('-'); + sb.Append(Enum.GetName(this.licenseEnumType, this.license)); + return sb.ToString(); + } + + /// + /// Parses the provided text into a + /// + /// The text to parse + /// The parsed + /// The text was in the wrong format + public static License Parse(string text) + { + return Parse(text, ignoreCase: false); + } + + /// + /// Parses the provided text into a + /// + /// The text to parse + /// A value indicating whether to ignore the case of the text + /// The parsed + /// The text was in the wrong format + public static License Parse(string text, bool ignoreCase) + { + License license; + if (!TryParse(text, ignoreCase, out license)) + { + throw new FormatException(); + } + + return license; + } + + /// + /// Tries to parse the provided text into a + /// + /// The text to parse + /// The parsed + /// True if the could be parsed; otherwise, false + public static bool TryParse(string text, out License license) + { + return TryParse(text, false, out license); + } + + /// + /// Tries to parse the provided text into a + /// + /// The text to parse + /// A value indicating whether to ignore the case of the text + /// The parsed + /// True if the could be parsed; otherwise, false + public static bool TryParse(string text, bool ignoreCase, out License license) + { + license = None; + if (string.IsNullOrWhiteSpace(text)) + { + return false; + } + + var parts = text.Split('-'); + + LicensingSource source; + if (!Enum.TryParse(parts[0], ignoreCase, out source)) + { + return false; + } + + if (parts.Length == 1 && source == LicensingSource.None) + { + return true; + } + + if (parts.Length == 1 && source == LicensingSource.Auto) + { + license = Auto; + return true; + } + + if (parts.Length > 2) + { + return false; + } + + switch (source) + { + case LicensingSource.Msdn: + MsdnLicenseType msdnLicense; + if (Enum.TryParse(parts[1], ignoreCase, out msdnLicense) && msdnLicense != MsdnLicenseType.None) + { + license = MsdnLicense.GetLicense(msdnLicense); + return true; + } + + break; + + case LicensingSource.Account: + AccountLicenseType accountLicense; + if (Enum.TryParse(parts[1], ignoreCase, out accountLicense) && accountLicense != AccountLicenseType.None) + { + license = AccountLicense.GetLicense(accountLicense); + return true; + } + + break; + + case LicensingSource.Auto: + LicensingSource licenseSource; + if (Enum.TryParse(parts[1], ignoreCase, out licenseSource)) + { + license = AutoLicense.GetLicense(licenseSource); + return true; + } + + break; + } + + return false; + } + + /// + /// Gets a value indicating whether the two provided values are equivalent + /// + /// The first value + /// The second value + /// True if both values are equivalent; otherwise, false + public static bool Equals(License left, License right) + { + if (object.ReferenceEquals(left, null)) + { + return object.ReferenceEquals(right, null); + } + else if (object.ReferenceEquals(right, null)) + { + return false; + } + + return left.Equals(right); + } + + /// + /// Gets a value indicating whether the license is null or + /// + /// The license + /// true if the license is either null or ; otherwise, false + public static bool IsNullOrNone(License license) + { + return license == null || license.Source == LicensingSource.None; + } + + /// + /// Gets the license for the provided source and license type + /// + /// The source + /// The license type + /// The license + internal static License 
GetLicense(LicensingSource source, int license) + { + switch (source) + { + case LicensingSource.None: + return None; + + case LicensingSource.Account: + return AccountLicense.GetLicense((AccountLicenseType)license); + + case LicensingSource.Msdn: + return MsdnLicense.GetLicense((MsdnLicenseType)license); + + case LicensingSource.Profile: + throw new NotSupportedException(); + + case LicensingSource.Auto: + return Auto; + + default: + throw new InvalidEnumArgumentException("source", (int)source, typeof(LicensingSource)); + } + } + + /// + /// Gets a value indicating whether the two provided values are equivalent + /// + /// The first operand + /// The second operand + /// True if both values are equivalent; otherwise, false + public static bool operator ==(License left, License right) + { + return Equals(left, right); + } + + /// + /// Gets a value indicating whether the two provided values are not equivalent + /// + /// The first operand + /// The second operand + /// True if values are not equivalent; otherwise, false + public static bool operator !=(License left, License right) + { + return !Equals(left, right); + } + + /// + /// Compares two objects of the same type. + /// + /// True if the left-hand value is greater than the right-hand value; otherwise, false + /// The left-hand operand to compare + /// The right-hand operand to compare + public static bool operator >(License left, License right) + { + return LicenseComparer.Instance.Compare(left, right) > 0; + } + /// + /// Compares two objects of the same type. + /// + /// True if the left-hand value is greater than the right-hand value; otherwise, false + /// The left-hand operand to compare + /// The right-hand operand to compare + public static bool operator >=(License left, License right) + { + return LicenseComparer.Instance.Compare(left, right) >= 0; + } + + /// + /// Compares two objects of the same type. + /// + /// True if the left-hand value is less than the right-hand value; otherwise, false + /// The left-hand operand to compare + /// The right-hand operand to compare + public static bool operator <(License left, License right) + { + return LicenseComparer.Instance.Compare(left, right) < 0; + } + + /// + /// Compares two objects of the same type. + /// + /// True if the left-hand value is less than the right-hand value; otherwise, false + /// The left-hand operand to compare + /// The right-hand operand to compare + public static bool operator <=(License left, License right) + { + return LicenseComparer.Instance.Compare(left, right) <= 0; + } + + /// + /// A concrete that represents no license + /// + private sealed class NoLicense : License + { + /// + /// Initializes a new instance of the class + /// + internal NoLicense() + : base(LicensingSource.None, null, 0) + { + } + + /// + /// Returns a string that represents the current object. + /// + /// A string that represents the current object. 
+ public override string ToString() + { + return "None"; + } + } + + internal sealed class AutoLicense : License + { + /// + /// Represents an Auto license where the source provider is MSDN + /// + internal static readonly License Msdn = GetLicense(LicensingSource.Msdn); + + /// + /// Initializes a new instance of the class + /// + internal AutoLicense() + : base(LicensingSource.Auto, null, 0) + { + } + + private AutoLicense(LicensingSource licenseSource) + : base(LicensingSource.Auto, typeof(LicensingSource), (int) licenseSource) + { + } + + /// + /// Gets a instance for the provided licensing source + /// + internal static AutoLicense GetLicense(LicensingSource source) + { + return new AutoLicense(source); + } + + /// + /// Returns a string that represents the current object. + /// + /// A string that represents the current object. + public override string ToString() + { + return this.GetLicenseAsInt32() == 0 ? "Auto" : base.ToString(); + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Licensing/LicenseComparer.cs b/src/Sdk/WebApi/WebApi/Contracts/Licensing/LicenseComparer.cs new file mode 100644 index 00000000000..48c39f2dd02 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Licensing/LicenseComparer.cs @@ -0,0 +1,98 @@ +using System.Collections.Generic; + +namespace GitHub.Services.Licensing +{ + public class LicenseComparer : IComparer + { + public int Compare(License x, License y) + { + if (x == null && y == null) + { + return 0; + } + + if (x == null) + { + return -1; + } + + if (y == null) + { + return 1; + } + + // both licenses have to have a source of Account or Msdn to compare weights + if((x.Source == LicensingSource.Account || x.Source == LicensingSource.Msdn) + && (y.Source == LicensingSource.Account || y.Source == LicensingSource.Msdn)) + { + var thisLicenseWeight = GetWeight(x); + var otherLicenseWeight = GetWeight(y); + + return thisLicenseWeight.CompareTo(otherLicenseWeight); + } + + // Not a known source. Just do a license value compare. 
+ return x.GetLicenseAsInt32().CompareTo(y.GetLicenseAsInt32()); + } + + public int GetWeight(License license) + { + if (license == License.None) + { + return 0; + } + else if (license == AccountLicense.Stakeholder) + { + return 1; + } + else if (license == AccountLicense.Express) + { + return 2; + } + else if (license == AccountLicense.Professional) + { + return 3; + } + else if (license == MsdnLicense.Eligible) + { + return 4; + } + else if (license == MsdnLicense.Professional) + { + return 5; + } + else if (license == AccountLicense.Advanced) + { + return 6; + } + else if (license == MsdnLicense.TestProfessional) + { + return 7; + } + else if (license == MsdnLicense.Platforms) + { + return 8; + } + else if (license == MsdnLicense.Premium) + { + return 9; + } + else if (license == MsdnLicense.Ultimate) + { + return 10; + } + else if (license == MsdnLicense.Enterprise) + { + return 11; + } + else if (license == AccountLicense.EarlyAdopter) + { + return 12; + } + + return 0; // Unexpected license + } + + public static LicenseComparer Instance { get; } = new LicenseComparer(); + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Licensing/LicenseJsonConverter.cs b/src/Sdk/WebApi/WebApi/Contracts/Licensing/LicenseJsonConverter.cs new file mode 100644 index 00000000000..f7baceb7c83 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Licensing/LicenseJsonConverter.cs @@ -0,0 +1,66 @@ +using System; +using System.Reflection; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.Services.Licensing +{ + /// + /// Converts between a and a JSON-serialized license string + /// + internal sealed class LicenseJsonConverter : VssSecureJsonConverter + { + /// + /// Determines whether this instance can convert the specified object type. + /// + /// Type of the object + /// true if this instance can convert the specified object type; otherwise, false. + public override bool CanConvert(Type objectType) + { + return typeof(License).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + /// + /// Reads the JSON representation of the object. + /// + /// The Newtonsoft.Json.JsonReader to read from. + /// Type of the object. + /// The existing value of object being read. + /// The calling serializer. + /// The object value. + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + if (reader.TokenType == JsonToken.String) + { + return License.Parse(reader.Value.ToString(), ignoreCase: true); + } + + if (reader.TokenType == JsonToken.Null) + { + return null; + } + + throw new JsonSerializationException(); + } + + /// + /// Writes the JSON representation of the object. + /// + /// The Newtonsoft.Json.JsonWriter to write to. + /// The value. + /// The calling serializer. 
+ public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + base.WriteJson(writer, value, serializer); + + if (value == null) + { + writer.WriteNull(); + return; + } + + var license = (License)value; + writer.WriteValue(license.ToString()); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Licensing/LicenseTypeConverter.cs b/src/Sdk/WebApi/WebApi/Contracts/Licensing/LicenseTypeConverter.cs new file mode 100644 index 00000000000..adedb23c262 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Licensing/LicenseTypeConverter.cs @@ -0,0 +1,85 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace GitHub.Services.Licensing +{ + /// + /// Performs type conversion for a License + /// + internal sealed class LicenseTypeConverter : TypeConverter + { + /// Returns whether this converter can convert an object of the given type to the type of this converter, using the specified context. + /// true if this converter can perform the conversion; otherwise, false. + /// An that provides a format context. + /// A that represents the type you want to convert from. + public override bool CanConvertFrom(ITypeDescriptorContext context, Type sourceType) + { + return sourceType == typeof(string) || base.CanConvertFrom(context, sourceType); + } + + /// Returns whether this converter can convert the object to the specified type, using the specified context. + /// true if this converter can perform the conversion; otherwise, false. + /// An that provides a format context. + /// A that represents the type you want to convert to. + public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType) + { + return destinationType == typeof(string) || base.CanConvertTo(context, destinationType); + } + + /// Converts the given object to the type of this converter, using the specified context and culture information. + /// An that represents the converted value. + /// An that provides a format context. + /// The to use as the current culture. + /// The to convert. + /// The conversion cannot be performed. + public override object ConvertFrom(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value) + { + var text = value as string; + if (text != null) + { + return License.Parse(text); + } + + return base.ConvertFrom(context, culture, value); + } + + /// Converts the given value object to the specified type, using the specified context and culture information. + /// An that represents the converted value. + /// An that provides a format context. + /// A . If null is passed, the current culture is assumed. + /// The to convert. + /// The to convert the parameter to. + /// The parameter is null. + /// The conversion cannot be performed. + public override object ConvertTo(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value, Type destinationType) + { + var license = value as License; + if (license != null && destinationType == typeof(string)) + { + return license.ToString(); + } + + return base.ConvertTo(context, culture, value, destinationType); + } + + /// Returns whether the given value object is valid for this type and for the specified context. + /// true if the specified value is valid for this object; otherwise, false. + /// An that provides a format context. + /// The to test for validity. 
+ public override bool IsValid(ITypeDescriptorContext context, object value) + { + var text = value as string; + if (text != null) + { + License license; + return License.TryParse(text, out license); + } + + return base.IsValid(context, value); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Licensing/MsdnLicense.cs b/src/Sdk/WebApi/WebApi/Contracts/Licensing/MsdnLicense.cs new file mode 100644 index 00000000000..8aefe17da17 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Licensing/MsdnLicense.cs @@ -0,0 +1,143 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace GitHub.Services.Licensing +{ + /// + /// Represents an Msdn license + /// + public sealed class MsdnLicense : License, IComparable + { + /// + /// The account user is MSDN Eligible + /// + public static readonly MsdnLicense Eligible = new MsdnLicense(MsdnLicenseType.Eligible); + + /// + /// The account user has an MSDN Professional license + /// + public static readonly MsdnLicense Professional = new MsdnLicense(MsdnLicenseType.Professional); + + /// + /// The account user has an MSDN Platforms license + /// + public static readonly MsdnLicense Platforms = new MsdnLicense(MsdnLicenseType.Platforms); + + /// + /// The account user has an MSDN TestProfessional license + /// + public static readonly MsdnLicense TestProfessional = new MsdnLicense(MsdnLicenseType.TestProfessional); + + /// + /// The account user has an MSDN Premium license + /// + public static readonly MsdnLicense Premium = new MsdnLicense(MsdnLicenseType.Premium); + + /// + /// The account user has an MSDN Ultimate license + /// + public static readonly MsdnLicense Ultimate = new MsdnLicense(MsdnLicenseType.Ultimate); + + /// + /// The account user has an MSDN Enterprise license + /// + public static readonly MsdnLicense Enterprise = new MsdnLicense(MsdnLicenseType.Enterprise); + + /// + /// Initializes an instance of the class + /// + /// The type of license + private MsdnLicense(MsdnLicenseType license) + : base(LicensingSource.Msdn, typeof(MsdnLicenseType), (int)license) + { + } + + /// + /// Gets the license type for the license + /// + public MsdnLicenseType License + { + get { return (MsdnLicenseType)this.GetLicenseAsInt32(); } + } + + /// + /// Compares the current object with another object of the same type. + /// + /// A value that indicates the relative order of the objects being compared. The return value has the following meanings: Value Meaning Less than zero This object is less than the parameter.Zero This object is equal to . Greater than zero This object is greater than . + /// An object to compare with this object. + public int CompareTo(MsdnLicense other) + { + return Compare(this, other); + } + + /// + /// Compares two objects of the same type. + /// + /// A value that indicates the relative order of the objects being compared. + /// The left-hand operand to compare + /// The right-hand operand to compare + public static int Compare(MsdnLicense left, MsdnLicense right) + { + if (object.ReferenceEquals(left, null)) + { + if (object.ReferenceEquals(right, null)) return 0; + return -1; + } + else if (object.ReferenceEquals(right, null)) + { + return +1; + } + + return LicenseComparer.Instance.Compare(left, right); + } + + /// + /// Compares two objects of the same type. 
+ /// + /// True if the left-hand value is greater than the right-hand value; otherwise, false + /// The left-hand operand to compare + /// The right-hand operand to compare + public static bool operator >(MsdnLicense left, MsdnLicense right) + { + return Compare(left, right) > 0; + } + + /// + /// Compares two objects of the same type. + /// + /// True if the left-hand value is less than the right-hand value; otherwise, false + /// The left-hand operand to compare + /// The right-hand operand to compare + public static bool operator <(MsdnLicense left, MsdnLicense right) + { + return Compare(left, right) < 0; + } + + /// + /// Gets a instance for the provided license type + /// + /// The type of license + /// A license for the provided license type + /// license was not in the list of allowed values + public static License GetLicense(MsdnLicenseType license) + { + switch (license) + { + case MsdnLicenseType.None: return None; + case MsdnLicenseType.Eligible: return Eligible; + case MsdnLicenseType.Professional: return Professional; + case MsdnLicenseType.Platforms: return Platforms; + case MsdnLicenseType.TestProfessional: return TestProfessional; + case MsdnLicenseType.Premium: return Premium; + case MsdnLicenseType.Ultimate: return Ultimate; + case MsdnLicenseType.Enterprise: return Enterprise; + default: + throw new InvalidEnumArgumentException("license", (int)license, typeof(MsdnLicenseType)); + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Location/AccessMapping.cs b/src/Sdk/WebApi/WebApi/Contracts/Location/AccessMapping.cs new file mode 100644 index 00000000000..622daf2db01 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Location/AccessMapping.cs @@ -0,0 +1,151 @@ +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using System; +using System.Diagnostics; +using System.Runtime.Serialization; +using System.Xml; + +namespace GitHub.Services.Location +{ + /// + /// + /// + [DataContract] + public class AccessMapping : ISecuredObject + { + public AccessMapping() { } + + public AccessMapping(String moniker, String displayName, String accessPoint, Guid serviceOwner = new Guid()) + :this (moniker, displayName, accessPoint, serviceOwner, null) + { + } + + public AccessMapping(String moniker, String displayName, String accessPoint, Guid serviceOwner, String virtualDirectory) + { + DisplayName = displayName; + Moniker = moniker; + AccessPoint = accessPoint; + ServiceOwner = serviceOwner; + VirtualDirectory = virtualDirectory; + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String DisplayName + { + get; + set; + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Moniker + { + get; + set; + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String AccessPoint + { + get; + set; + } + + /// + /// The service which owns this access mapping e.g. TFS, ELS, etc. + /// + [DataMember] + public Guid ServiceOwner + { + get; + set; + } + + /// + /// Part of the access mapping which applies context after the access point + /// of the server. 
+ /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String VirtualDirectory + { + get; + set; + } + + public AccessMapping Clone() + { + return new AccessMapping(Moniker, DisplayName, AccessPoint, ServiceOwner, VirtualDirectory); + } + + /// + /// + /// + /// + /// + /// + internal static AccessMapping FromXml(IServiceProvider serviceProvider, XmlReader reader) + { + AccessMapping obj = new AccessMapping(); + Debug.Assert(reader.NodeType == XmlNodeType.Element, "Expected a node."); + + Boolean empty = reader.IsEmptyElement; + + // Process the xml attributes + if (reader.HasAttributes) + { + while (reader.MoveToNextAttribute()) + { + switch (reader.Name) + { + case "AccessPoint": + obj.AccessPoint = reader.Value; + break; + case "DisplayName": + obj.DisplayName = reader.Value; + break; + case "Moniker": + obj.Moniker = reader.Value; + break; + default: + // Allow attributes such as xsi:type to fall through + break; + } + } + } + + // Process the fields in Xml elements + reader.Read(); + if (!empty) + { + while (reader.NodeType == XmlNodeType.Element) + { + switch (reader.Name) + { + default: + // Make sure that we ignore XML node trees we do not understand + reader.ReadOuterXml(); + break; + } + } + reader.ReadEndElement(); + } + return obj; + } + + #region ISecuredObject + Guid ISecuredObject.NamespaceId => LocationSecurityConstants.NamespaceId; + + int ISecuredObject.RequiredPermissions => LocationSecurityConstants.Read; + + string ISecuredObject.GetToken() => LocationSecurityConstants.NamespaceRootToken; + #endregion + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Location/ConnectionData.cs b/src/Sdk/WebApi/WebApi/Contracts/Location/ConnectionData.cs new file mode 100644 index 00000000000..3221dc29d58 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Location/ConnectionData.cs @@ -0,0 +1,397 @@ +using GitHub.Services.Common; +using GitHub.Services.Common.Internal; +using GitHub.Services.WebApi; +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Runtime.Serialization; +using System.Xml; +using IC = GitHub.Services.Identity; + +namespace GitHub.Services.Location +{ + /// + /// Data transfer class that holds information needed to set up a + /// connection with a VSS server. + /// + [DataContract] + public class ConnectionData : ISecuredObject + { + /// + /// The Id of the authenticated user who made this request. More information about the user can be + /// obtained by passing this Id to the Identity service + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public IC.Identity AuthenticatedUser + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The Id of the authorized user who made this request. More information about the user can be + /// obtained by passing this Id to the Identity service + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public IC.Identity AuthorizedUser + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The instance id for this host. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid InstanceId + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The id for the server. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid DeploymentId + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The type for the server Hosted/OnPremises. 
+ /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public DeploymentFlags DeploymentType + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The last user access for this instance. Null if not requested specifically. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public DateTime? LastUserAccess + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// Data that the location service holds. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public LocationServiceData LocationServiceData + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The virtual directory of the host we are talking to. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String WebApplicationRelativeDirectory + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// + /// + /// + /// + /// + internal static ConnectionData FromXml(IServiceProvider serviceProvider, XmlReader reader) + { + ConnectionData obj = new ConnectionData(); + Debug.Assert(reader.NodeType == XmlNodeType.Element, "Expected a node."); + + Boolean empty = reader.IsEmptyElement; + + // Process the xml attributes + if (reader.HasAttributes) + { + while (reader.MoveToNextAttribute()) + { + switch (reader.Name) + { + case "CatalogResourceId": + obj.m_catalogResourceId = XmlConvert.ToGuid(reader.Value); + break; + case "InstanceId": + obj.InstanceId = XmlConvert.ToGuid(reader.Value); + break; + case "ServerCapabilities": + obj.m_serverCapabilities = XmlConvert.ToInt32(reader.Value); + break; + case "WebApplicationRelativeDirectory": + obj.WebApplicationRelativeDirectory = reader.Value; + break; + default: + // Allow attributes such as xsi:type to fall through + break; + } + } + } + + // Process the fields in Xml elements + reader.Read(); + if (!empty) + { + while (reader.NodeType == XmlNodeType.Element) + { + switch (reader.Name) + { + case "AuthenticatedUser": + obj.AuthenticatedUser = IC.Identity.FromXml(serviceProvider, reader); + break; + case "AuthorizedUser": + obj.AuthorizedUser = IC.Identity.FromXml(serviceProvider, reader); + break; + case "LocationServiceData": + obj.LocationServiceData = LocationServiceData.FromXml(serviceProvider, reader); + break; + default: + // Make sure that we ignore XML node trees we do not understand + reader.ReadOuterXml(); + break; + } + } + reader.ReadEndElement(); + } + return obj; + } + + #region ISecuredObject + Guid ISecuredObject.NamespaceId => LocationSecurityConstants.NamespaceId; + + int ISecuredObject.RequiredPermissions => LocationSecurityConstants.Read; + + string ISecuredObject.GetToken() => LocationSecurityConstants.NamespaceRootToken; + #endregion + + private Guid m_catalogResourceId; + private Int32 m_serverCapabilities; + } + + /// + /// Data transfer class used to transfer data about the location + /// service data over the web service. + /// + [DataContract] + public class LocationServiceData : ISecuredObject + { + /// + /// The identifier of the deployment which is hosting this location data + /// (e.g. SPS, TFS, ELS, Napa, etc.) + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid ServiceOwner + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// Data about the access mappings contained by this location service. 
+ /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public ICollection AccessMappings + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// Data that the location service holds. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Boolean ClientCacheFresh + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The time to live on the location service cache. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [DefaultValue(3600)] + public Int32 ClientCacheTimeToLive + { + get + { + return m_clientCacheTimeToLive; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + set + { + m_clientCacheTimeToLive = value; + } + } + + /// + /// The default access mapping moniker for the server. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String DefaultAccessMappingMoniker + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The obsolete id for the last change that + /// took place on the server (use LastChangeId64). + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int32 LastChangeId + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + /// + /// The non-truncated 64-bit id for the last change that + /// took place on the server. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int64 LastChangeId64 + { + get + { + // Use obsolete truncated 32-bit value when receiving message from "old" server that doesn't provide 64-bit value + return m_lastChangeId64 != 0 ? m_lastChangeId64 : LastChangeId; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + set + { + m_lastChangeId64 = value; + } + } + + /// + /// Data about the service definitions contained by this location service. 
+ /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public ICollection ServiceDefinitions + { + get; + + [EditorBrowsable(EditorBrowsableState.Never)] + set; + } + + #region ISecuredObject + Guid ISecuredObject.NamespaceId => LocationSecurityConstants.NamespaceId; + + int ISecuredObject.RequiredPermissions => LocationSecurityConstants.Read; + + string ISecuredObject.GetToken() => LocationSecurityConstants.NamespaceRootToken; + #endregion + + /// + /// + /// + /// + /// + /// + internal static LocationServiceData FromXml(IServiceProvider serviceProvider, XmlReader reader) + { + LocationServiceData obj = new LocationServiceData(); + Debug.Assert(reader.NodeType == XmlNodeType.Element, "Expected a node."); + + Boolean empty = reader.IsEmptyElement; + + // Process the xml attributes + if (reader.HasAttributes) + { + while (reader.MoveToNextAttribute()) + { + switch (reader.Name) + { + case "AccessPointsDoNotIncludeWebAppRelativeDirectory": + obj.m_accessPointsDoNotIncludeWebAppRelativeDirectory = XmlConvert.ToBoolean(reader.Value); + break; + case "ClientCacheFresh": + obj.ClientCacheFresh = XmlConvert.ToBoolean(reader.Value); + break; + case "DefaultAccessMappingMoniker": + obj.DefaultAccessMappingMoniker = reader.Value; + break; + case "LastChangeId": + obj.LastChangeId = XmlConvert.ToInt32(reader.Value); + break; + case "ClientCacheTimeToLive": + obj.ClientCacheTimeToLive = XmlConvert.ToInt32(reader.Value); + break; + default: + // Allow attributes such as xsi:type to fall through + break; + } + } + } + + // Process the fields in Xml elements + reader.Read(); + if (!empty) + { + while (reader.NodeType == XmlNodeType.Element) + { + switch (reader.Name) + { + case "AccessMappings": + obj.AccessMappings = XmlUtility.ArrayOfObjectFromXml(serviceProvider, reader, "AccessMapping", false, AccessMapping.FromXml); + break; + case "ServiceDefinitions": + obj.ServiceDefinitions = XmlUtility.ArrayOfObjectFromXml(serviceProvider, reader, "ServiceDefinition", false, ServiceDefinition.FromXml); + break; + default: + // Make sure that we ignore XML node trees we do not understand + reader.ReadOuterXml(); + break; + } + } + reader.ReadEndElement(); + } + return obj; + } + + private Int32 m_clientCacheTimeToLive = 3600; + private Boolean m_accessPointsDoNotIncludeWebAppRelativeDirectory; + private Int64 m_lastChangeId64; + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Location/Constants.cs b/src/Sdk/WebApi/WebApi/Contracts/Location/Constants.cs new file mode 100644 index 00000000000..83f5b4c07e8 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Location/Constants.cs @@ -0,0 +1,107 @@ +using GitHub.Services.Common; +using System; +using System.ComponentModel; +using System.Runtime.Serialization; + +namespace GitHub.Services.Location +{ + /// + /// + /// + [DataContract] + public enum RelativeToSetting + { + [EnumMember] + Context = 0, + + [EnumMember] + WebApplication = 2, + + [EnumMember] + FullyQualified = 3 + } + + [DataContract] + public enum ServiceStatus : byte + { + [EnumMember] + Assigned = 0, + + [EnumMember] + Active = 1, + + [EnumMember] + Moving = 2, + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [DataContract] + public enum InheritLevel : byte + { + None = 0, + + // The definition is visible on the deployment + Deployment = 1, + + // The definition is visible on every account (unless overridden) + Account = 2, + + // The definition is visible on every collection (unless overridden) + Collection = 4, + + All = Deployment | Account | Collection + } + + /// + /// + 
/// + public static class ServiceInterfaces + { + public const String LocationService2 = "LocationService2"; + public const String VsService = "VsService"; + public const String VirtualLocation = "VirtualLocation"; + } + + public static class LocationServiceConstants + { + /// + /// If a Location Service has an entry for an application location service, that + /// location service definition will have an identifier of this value. + /// + public static readonly Guid ApplicationIdentifier = new Guid("8d299418-9467-402b-a171-9165e2f703e2"); + + /// + /// Pointer to the root location service instance + /// + public static readonly Guid RootIdentifier = new Guid("951917AC-A960-4999-8464-E3F0AA25B381"); + + + /// + /// All Location Services have a reference to their own service definition. That + /// service definition has an identifier of this value. + /// + public static readonly Guid SelfReferenceIdentifier = new Guid("464CCB8D-ABAF-4793-B927-CFDC107791EE"); + } + + [GenerateAllConstants] + public static class AccessMappingConstants + { + public static readonly string PublicAccessMappingMoniker = "PublicAccessMapping"; + public static readonly string ServerAccessMappingMoniker = "ServerAccessMapping"; + public static readonly string ClientAccessMappingMoniker = "ClientAccessMapping"; + public static readonly string HostGuidAccessMappingMoniker = "HostGuidAccessMapping"; + public static readonly string RootDomainMappingMoniker = "RootDomainMapping"; + public static readonly string AzureInstanceMappingMoniker = "AzureInstanceMapping"; + public static readonly string ServicePathMappingMoniker = "ServicePathMapping"; + public static readonly string ServiceDomainMappingMoniker = "ServiceDomainMapping"; + public static readonly string LegacyPublicAccessMappingMoniker = "LegacyPublicAccessMapping"; + public static readonly string MessageQueueAccessMappingMoniker = "MessageQueueAccessMapping"; + public static readonly string LegacyAppDotAccessMappingMoniker = "LegacyAppDotDomain"; + public static readonly string AffinitizedMultiInstanceAccessMappingMoniker = "AffinitizedMultiInstanceAccessMapping"; + + public static readonly string VstsAccessMapping = "VstsAccessMapping"; + public static readonly string DevOpsAccessMapping = "CodexAccessMapping"; + + [Obsolete][EditorBrowsable(EditorBrowsableState.Never)] public static readonly string ServiceAccessMappingMoniker = "ServiceAccessMappingMoniker"; + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Location/LocationMapping.cs b/src/Sdk/WebApi/WebApi/Contracts/Location/LocationMapping.cs new file mode 100644 index 00000000000..24d73a2bd1f --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Location/LocationMapping.cs @@ -0,0 +1,115 @@ +using System; +using System.Diagnostics; +using System.Runtime.Serialization; +using System.Xml; +using System.Xml.Serialization; +using GitHub.Services.WebApi; +using GitHub.Services.Common; + +namespace GitHub.Services.Location +{ + /// + /// + /// + [DataContract] + public class LocationMapping : ISecuredObject + { + public LocationMapping() { } + + public LocationMapping(String accessMappingMoniker, String location) + { + AccessMappingMoniker = accessMappingMoniker; + Location = location; + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [XmlAttribute("accessMappingMoniker")] // needed for servicing serialization + public String AccessMappingMoniker + { + get; + set; + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [XmlAttribute("location")] // 
needed for servicing serialization + public String Location + { + get; + set; + } + + public override string ToString() => Location; + + /// + /// + /// + /// + /// + /// + internal static LocationMapping FromXml(IServiceProvider serviceProvider, XmlReader reader) + { + LocationMapping obj = new LocationMapping(); + Debug.Assert(reader.NodeType == XmlNodeType.Element, "Expected a node."); + + Boolean empty = reader.IsEmptyElement; + + // Process the xml attributes + if (reader.HasAttributes) + { + while (reader.MoveToNextAttribute()) + { + switch (reader.Name) + { + case "accessMappingMoniker": + obj.AccessMappingMoniker = reader.Value; + break; + case "location": + obj.Location = reader.Value; + break; + default: + // Allow attributes such as xsi:type to fall through + break; + } + } + } + + // Process the fields in Xml elements + reader.Read(); + if (!empty) + { + while (reader.NodeType == XmlNodeType.Element) + { + switch (reader.Name) + { + default: + // Make sure that we ignore XML node trees we do not understand + reader.ReadOuterXml(); + break; + } + } + reader.ReadEndElement(); + } + return obj; + } + + #region ISecuredObject + Guid ISecuredObject.NamespaceId => LocationSecurityConstants.NamespaceId; + + int ISecuredObject.RequiredPermissions => LocationSecurityConstants.Read; + + string ISecuredObject.GetToken() + { + return LocationSecurityConstants.ServiceDefinitionsToken; + } + #endregion + + public LocationMapping Clone() + => new LocationMapping(AccessMappingMoniker, Location); + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Location/ResourceAreaInfo.cs b/src/Sdk/WebApi/WebApi/Contracts/Location/ResourceAreaInfo.cs new file mode 100644 index 00000000000..b784e3b20c7 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Location/ResourceAreaInfo.cs @@ -0,0 +1,31 @@ +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Services.Location +{ + [DataContract] + public class ResourceAreaInfo + { + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid Id + { + get; + set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Name + { + get; + set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String LocationUrl + { + get; + set; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Location/ServiceDefinition.cs b/src/Sdk/WebApi/WebApi/Contracts/Location/ServiceDefinition.cs new file mode 100644 index 00000000000..bbf1635e7e2 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Location/ServiceDefinition.cs @@ -0,0 +1,649 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Runtime.Serialization; +using System.Xml; +using System.Xml.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.Location +{ + /// + /// + /// + [DebuggerDisplay("{ServiceType}:{Identifier}")] + [DataContract] + public class ServiceDefinition : ISecuredObject + { + public ServiceDefinition() + { + LocationMappings = new List(); + Status = ServiceStatus.Active; + Properties = new PropertiesCollection(); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public ServiceDefinition( + String serviceType, + Guid identifier, + String displayName, + String relativePath, + RelativeToSetting relativeToSetting, + String description, + String toolId, + List locationMappings = null, + Guid serviceOwner = new Guid()) + { + 
ServiceType = serviceType; + Identifier = identifier; + DisplayName = displayName; + RelativePath = relativePath; + RelativeToSetting = relativeToSetting; + Description = description; + ToolId = toolId; + + if (locationMappings == null) + { + locationMappings = new List(); + } + + LocationMappings = locationMappings; + ServiceOwner = serviceOwner; + Properties = new PropertiesCollection(); + Status = ServiceStatus.Active; + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [XmlAttribute("serviceType")] // XML Attribute is required for servicing xml de-serialization + public String ServiceType + { + get; + set; + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [XmlAttribute("identifier")] // XML Attribute is required for servicing xml de-serialization + public Guid Identifier + { + get; + set; + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [XmlAttribute("displayName")] // XML Attribute is required for servicing xml de-serialization + public String DisplayName + { + get; + set; + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public RelativeToSetting RelativeToSetting + { + get; + set; + } + + [XmlAttribute("relativeToSetting")] // XML Attribute is required for servicing xml de-serialization + [EditorBrowsable(EditorBrowsableState.Never)] + public Int32 RelativeToSettingValue + { + get + { + return (Int32)RelativeToSetting; + } + set + { + RelativeToSetting = (RelativeToSetting)value; + } + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [XmlAttribute("relativePath")] // XML Attribute is required for servicing xml de-serialization + public String RelativePath + { + get; + set; + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [XmlAttribute("description")] // XML Attribute is required for servicing xml de-serialization + public String Description + { + get; + set; + } + + /// + /// The service which owns this definition e.g. TFS, ELS, etc. + /// + [DataMember] + public Guid ServiceOwner + { + get; + set; + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public List LocationMappings + { + get; + set; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [XmlAttribute("toolId")] // XML Attribute is required for servicing xml de-serialization + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String ToolId { get; set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String ParentServiceType + { + get; + set; + } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid ParentIdentifier + { + get; + set; + } + + [DefaultValue(ServiceStatus.Active)] + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public ServiceStatus Status + { + get; + set; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [XmlAttribute("inheritLevel")] + public InheritLevel InheritLevel + { + get; + set; + } + + /// + /// + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + [XmlIgnore] + public PropertiesCollection Properties + { + get; + set; + } + + //***************************************************************************************************************** + /// + /// Generic Property accessor. 
Returns default value of T if not found + /// + //***************************************************************************************************************** + public T GetProperty(String name, T defaultValue) + { + T value; + if (Properties != null && Properties.TryGetValue(name, out value)) + { + return value; + } + else + { + return defaultValue; + } + } + + //***************************************************************************************************************** + /// + /// Property accessor. value will be null if not found. + /// + //***************************************************************************************************************** + public Boolean TryGetProperty(String name, out Object value) + { + value = null; + return Properties == null ? false : Properties.TryGetValue(name, out value); + } + + //***************************************************************************************************************** + /// + /// Internal function to initialize persisted property. + /// + //***************************************************************************************************************** + public void SetProperty(String name, Object value) + { + m_hasModifiedProperties = true; + + //don't remove properties with null + //vals, just set them to null... + Properties[name] = value; + } + + //***************************************************************************************************************** + /// + /// + /// + /// + //***************************************************************************************************************** + [EditorBrowsable(EditorBrowsableState.Never)] + public Boolean HasModifiedProperties + { + get + { + return m_hasModifiedProperties; + } + } + + //***************************************************************************************************************** + /// + /// + /// + //***************************************************************************************************************** + [EditorBrowsable(EditorBrowsableState.Never)] + public void ResetModifiedProperties() + { + m_hasModifiedProperties = false; + } + + /// + /// The current resource version supported by this resource location. Copied from ApiResourceLocation. + /// + [XmlAttribute("resourceVersion")] + [DefaultValue(0)] + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Int32 ResourceVersion { get; set; } + + /// + /// Minimum api version that this resource supports. Copied from ApiResourceLocation. + /// + [XmlIgnore] + public Version MinVersion { get; set; } + + /// + /// Minimum api version that this resource supports. Copied from ApiResourceLocation. + /// + [XmlAttribute("minVersion")] + [DefaultValue(null)] + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "MinVersion")] + public String MinVersionString + { + get + { + if (MinVersion == null) + { + return null; + } + else + { + return MinVersion.ToString(2); + } + } + set + { + if (String.IsNullOrEmpty(value)) + { + MinVersion = null; + } + else + { + MinVersion = new Version(value); + } + } + } + + /// + /// Maximum api version that this resource supports (current server version for this resource). Copied from ApiResourceLocation. + /// + [XmlIgnore] + public Version MaxVersion { get; set; } + + /// + /// Maximum api version that this resource supports (current server version for this resource). Copied from ApiResourceLocation. 
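The *VersionString properties exist only so the System.Version values survive serialization. A minimal round-trip sketch, assuming nothing beyond the properties declared on ServiceDefinition (the console wrapper and version numbers are illustrative):

```csharp
using System;
using GitHub.Services.Location;

class VersionStringSketch
{
    static void Main()
    {
        var definition = new ServiceDefinition();

        // Setting the string side parses into a System.Version...
        definition.MinVersionString = "5.0";
        Console.WriteLine(definition.MinVersion);         // 5.0

        // ...and the Version side is exposed as a two-part "major.minor" string.
        definition.MinVersion = new Version(6, 0, 1);
        Console.WriteLine(definition.MinVersionString);   // "6.0" (Version.ToString(2))

        // Null or empty clears the value, so the member is omitted on the wire.
        definition.MinVersionString = null;
        Console.WriteLine(definition.MinVersion == null); // True
    }
}
```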
+ /// + [XmlAttribute("maxVersion")] + [DefaultValue(null)] + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "MaxVersion")] + public String MaxVersionString + { + get + { + if (MaxVersion == null) + { + return null; + } + else + { + return MaxVersion.ToString(2); + } + } + set + { + if (String.IsNullOrEmpty(value)) + { + MaxVersion = null; + } + else + { + MaxVersion = new Version(value); + } + } + } + + /// + /// The latest version of this resource location that is in "Release" (non-preview) mode. Copied from ApiResourceLocation. + /// + [XmlIgnore] + public Version ReleasedVersion { get; set; } + + /// + /// The latest version of this resource location that is in "Release" (non-preview) mode. Copied from ApiResourceLocation. + /// + [XmlAttribute("releasedVersion")] + [DefaultValue(null)] + [DataMember(IsRequired = false, EmitDefaultValue = false, Name = "ReleasedVersion")] + public String ReleasedVersionString + { + get + { + if (ReleasedVersion == null) + { + return null; + } + else + { + return ReleasedVersion.ToString(2); + } + } + set + { + if (String.IsNullOrEmpty(value)) + { + ReleasedVersion = null; + } + else + { + ReleasedVersion = new Version(value); + } + } + } + + /// + /// + /// + /// + public ServiceDefinition Clone() + { + return Clone(true); + } + + public ServiceDefinition Clone(Boolean includeLocationMappings) + { + List locationMappings = null; + + if (LocationMappings != null && includeLocationMappings) + { + locationMappings = new List(LocationMappings.Count); + + foreach (LocationMapping mapping in LocationMappings) + { + locationMappings.Add(new LocationMapping() + { + AccessMappingMoniker = mapping.AccessMappingMoniker, + Location = mapping.Location + }); + } + } + else + { + locationMappings = new List(); + } + + PropertiesCollection properties = null; + + if (Properties != null) + { + // since we are cloning, don't validate the values + properties = new PropertiesCollection(Properties, validateExisting: false); + } + else + { + properties = new PropertiesCollection(); + } + + ServiceDefinition serviceDefinition = new ServiceDefinition() + { + ServiceType = ServiceType, + Identifier = Identifier, + DisplayName = DisplayName, + RelativePath = RelativePath, + RelativeToSetting = RelativeToSetting, + Description = Description, + LocationMappings = locationMappings, + ServiceOwner = ServiceOwner, + ToolId = ToolId, + ParentServiceType = ParentServiceType, + ParentIdentifier = ParentIdentifier, + Status = Status, + Properties = properties, + ResourceVersion = ResourceVersion, + MinVersion = MinVersion, + MaxVersion = MaxVersion, + ReleasedVersion = ReleasedVersion + }; + + serviceDefinition.ResetModifiedProperties(); + return serviceDefinition; + } + + /// + /// + /// + /// + /// + /// + internal static ServiceDefinition FromXml(IServiceProvider serviceProvider, XmlReader reader) + { + ServiceDefinition obj = new ServiceDefinition(); + Debug.Assert(reader.NodeType == XmlNodeType.Element, "Expected a node."); + + Boolean empty = reader.IsEmptyElement; + + // Process the xml attributes + if (reader.HasAttributes) + { + while (reader.MoveToNextAttribute()) + { + switch (reader.Name) + { + case "description": + obj.Description = reader.Value; + break; + case "displayName": + obj.DisplayName = reader.Value; + break; + case "identifier": + obj.Identifier = XmlConvert.ToGuid(reader.Value); + break; + case "isSingleton": + obj.m_isSingleton = XmlConvert.ToBoolean(reader.Value); + break; + case "relativePath": + obj.RelativePath = reader.Value; + break; + 
case "relativeToSetting": + obj.RelativeToSetting = (RelativeToSetting)XmlConvert.ToInt32(reader.Value); + break; + case "serviceType": + obj.ServiceType = reader.Value; + break; + case "toolId": + obj.ToolId = reader.Value; + break; + case "resourceVersion": + obj.ResourceVersion = XmlConvert.ToInt32(reader.Value); + break; + case "minVersion": + obj.MinVersionString = reader.Value; + break; + case "maxVersion": + obj.MaxVersionString = reader.Value; + break; + case "releasedVersion": + obj.ReleasedVersionString = reader.Value; + break; + default: + // Allow attributes such as xsi:type to fall through + break; + } + } + } + + // Process the fields in Xml elements + reader.Read(); + if (!empty) + { + while (reader.NodeType == XmlNodeType.Element) + { + switch (reader.Name) + { + case "LocationMappings": + obj.LocationMappings = new List(XmlUtility.ArrayOfObjectFromXml(serviceProvider, reader, "LocationMapping", false, LocationMapping.FromXml)); + break; + case "Properties": + // Ignore properties + reader.ReadOuterXml(); + break; + default: + // Make sure that we ignore XML node trees we do not understand + reader.ReadOuterXml(); + break; + } + } + reader.ReadEndElement(); + } + return obj; + } + + /// + /// Returns the LocationMapping for the AccessMapping provided or null + /// if this ServiceDefinition does not have a LocationMapping for the provided + /// AccessMapping. This function will always return null if it is called + /// on a non-relative ServiceDefinition. + /// + /// + /// The AccessMapping to find the LocationMapping for. + /// + /// + /// The LocationMapping for the AccessMapping provided or null if this + /// ServiceDefinition does not have a LocationMapping for the provided + /// AccessMapping. This function will always return null if it is called + /// on a non-relative ServiceDefinition. + /// + public LocationMapping GetLocationMapping(AccessMapping accessMapping) + { + ArgumentUtility.CheckForNull(accessMapping, "accessMapping"); + + return GetLocationMapping(accessMapping.Moniker); + } + + public LocationMapping GetLocationMapping(String accessMappingMoniker) + { + ArgumentUtility.CheckForNull(accessMappingMoniker, "accessMappingMoniker"); + + // If this is FullyQualified then look through our location mappings + if (RelativeToSetting == RelativeToSetting.FullyQualified) + { + foreach (LocationMapping locationMapping in LocationMappings) + { + if (VssStringComparer.AccessMappingMoniker.Equals(locationMapping.AccessMappingMoniker, accessMappingMoniker)) + { + return locationMapping; + } + } + } + + // We weren't able to find the location for the access mapping. Return null. + return null; + } + + /// + /// Adds a location mapping for the provided access mapping and location + /// to the service definition. Note that if a mapping already exists for + /// the provided access mapping, it will be overwritten. + /// + /// The access mapping this location mapping is for. + /// This access mapping must already be registered in the LocationService. To create + /// a new access mapping, see LocationService.ConfigureAccessMapping + /// This value must be null if the RelativeToSetting + /// for this ServiceDefinition is something other than FullyQualified. If + /// this ServiceDefinition has a RelativeToSetting of FullyQualified, this + /// value must not be null and should be the location where this service resides + /// for this access mapping. 
+ public void AddLocationMapping(AccessMapping accessMapping, String location) + { + if (RelativeToSetting != RelativeToSetting.FullyQualified) + { + throw new InvalidOperationException(WebApiResources.RelativeLocationMappingErrorMessage()); + } + + // Make sure the location has a value + if (location == null) + { + throw new ArgumentException(WebApiResources.FullyQualifiedLocationParameter()); + } + + // See if an entry for this access mapping already exists, if it does, overwrite it. + foreach (LocationMapping mapping in LocationMappings) + { + if (VssStringComparer.AccessMappingMoniker.Equals(mapping.AccessMappingMoniker, accessMapping.Moniker)) + { + mapping.Location = location; + return; + } + } + + // This is a new entry for this access mapping, just add it. + LocationMappings.Add(new LocationMapping() { AccessMappingMoniker = accessMapping.Moniker, Location = location }); + } + + #region ISecuredObject + Guid ISecuredObject.NamespaceId => LocationSecurityConstants.NamespaceId; + + int ISecuredObject.RequiredPermissions => LocationSecurityConstants.Read; + + string ISecuredObject.GetToken() + { + return LocationSecurityConstants.ServiceDefinitionsToken; + } + #endregion + + private Boolean m_isSingleton; + private Boolean m_hasModifiedProperties = true; + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/PagedList.cs b/src/Sdk/WebApi/WebApi/Contracts/PagedList.cs new file mode 100644 index 00000000000..93e8097ad9c --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/PagedList.cs @@ -0,0 +1,25 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.Services.WebApi +{ + public interface IPagedList : IList + { + string ContinuationToken { get; } + } + + public class PagedList : List, IPagedList + { + public PagedList(IEnumerable list, String continuationToken) + : base(list) + { + this.ContinuationToken = continuationToken; + } + + public String ContinuationToken + { + get; + private set; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/AddPatchOperation.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/AddPatchOperation.cs new file mode 100644 index 00000000000..959e162100a --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/AddPatchOperation.cs @@ -0,0 +1,94 @@ +using System.Collections; +using System.ComponentModel; +using GitHub.Services.Common; +using GitHub.Services.WebApi.Patch.Json; + +namespace GitHub.Services.WebApi.Patch +{ + /// + /// Represents the JSON Patch Add operation. + /// + /// The model the patch operation applies to. + public class AddPatchOperation : PatchOperation + { + public AddPatchOperation() + { + this.Operation = Operation.Add; + } + + public AddPatchOperation(string path, object value): this() + { + this.Path = path; + this.Value = value; + } + + /// + /// Creates the strongly typed PatchOperation and validates the operation. + /// + /// The simple json patch operation model. + /// A valid and strongly typed PatchOperation. + [EditorBrowsable(EditorBrowsableState.Never)] + public static new PatchOperation CreateFromJson(JsonPatchOperation operation) + { + ValidatePath(operation); + + var value = ValidateAndGetValue(operation); + if (value == null) + { + throw new VssPropertyValidationException("Value", PatchResources.ValueCannotBeNull()); + } + + return new AddPatchOperation(operation.Path, value); + } + + /// + /// Applies the Add patch operation to the target + /// + /// The object to apply the operation to. 
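A minimal sketch of the Add operation's list semantics described above. The BiscuitTin model is hypothetical, the generic form AddPatchOperation&lt;TModel&gt; is reconstructed from the typeparam docs, and member resolution by path segment is assumed to go through the GitHub.Services.Common reflection helpers using the property name exactly as written.

```csharp
using System;
using System.Collections.Generic;
using GitHub.Services.WebApi.Patch;

// Hypothetical target model for the sketch.
class BiscuitTin
{
    public List<string> Biscuits { get; set; } = new List<string> { "digestive" };
}

class AddPatchSketch
{
    static void Main()
    {
        var tin = new BiscuitTin();

        // "-" appends to the end of the list; a numeric segment inserts at that index.
        new AddPatchOperation<BiscuitTin>("/Biscuits/-", "shortbread").Apply(tin);
        new AddPatchOperation<BiscuitTin>("/Biscuits/0", "ginger nut").Apply(tin);

        Console.WriteLine(string.Join(", ", tin.Biscuits)); // ginger nut, digestive, shortbread
    }
}
```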
+ public override void Apply(TModel target) + { + this.Apply( + target, + (type, parent, current) => + { + // Empty current means replace the whole object. + if (string.IsNullOrEmpty(current)) + { + parent = this.Value; + } + else if (type.IsList()) + { + var list = (IList)parent; + if (current == EndOfIndex) + { + list.Add(this.Value); + } + else + { + int index; + // When index == list.Count it's the same + // as doing an index append to the end. + if (int.TryParse(current, out index) && + list.Count >= index) + { + list.Insert(index, this.Value); + } + else + { + // We can't insert beyond the length of the list. + throw new PatchOperationFailedException(PatchResources.IndexOutOfRange(this.Path)); + } + } + } + else if (type.IsDictionary()) + { + ((IDictionary)parent)[current] = this.Value; + } + else + { + type.SetMemberValue(current, parent, this.Value); + } + }); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/Exceptions.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/Exceptions.cs new file mode 100644 index 00000000000..a9d4ca455de --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/Exceptions.cs @@ -0,0 +1,64 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.Common; + +namespace GitHub.Services.WebApi.Patch +{ + [Serializable] + [ExceptionMapping("0.0", "3.0", "PatchOperationFailedException", "GitHub.Services.WebApi.Patch.PatchOperationFailedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class PatchOperationFailedException : VssServiceException + { + public PatchOperationFailedException() + { + } + + public PatchOperationFailedException(string message) + : base(message) + { + } + + public PatchOperationFailedException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected PatchOperationFailedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "InvalidPatchFieldNameException", "GitHub.Services.WebApi.Patch.InvalidPatchFieldNameException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidPatchFieldNameException : PatchOperationFailedException + { + public InvalidPatchFieldNameException(string message) + : base(message) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "TestPatchOperationFailedException", "GitHub.Services.WebApi.Patch.TestPatchOperationFailedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class TestPatchOperationFailedException : PatchOperationFailedException + { + public TestPatchOperationFailedException() + { + } + + public TestPatchOperationFailedException(string message) + : base(message) + { + } + + public TestPatchOperationFailedException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected TestPatchOperationFailedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchDocument.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchDocument.cs new file mode 100644 index 00000000000..b4a3a119e86 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchDocument.cs @@ -0,0 +1,22 @@ +using System.Collections.Generic; + +namespace GitHub.Services.WebApi.Patch +{ + /// + /// The interface for the Patch Document + /// + /// The type this 
patch document applies to. + public interface IPatchDocument : IPatchOperationApplied, IPatchOperationApplying + { + /// + /// The patch operations. + /// + IEnumerable> Operations { get; } + + /// + /// Applies the operations to the target object. + /// + /// The object to apply the operations to. + void Apply(TModel target); + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchOperation.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchOperation.cs new file mode 100644 index 00000000000..e7109867a58 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchOperation.cs @@ -0,0 +1,44 @@ +using System.Collections.Generic; + +namespace GitHub.Services.WebApi.Patch +{ + /// + /// The interface for the Patch Operation. + /// + /// The type this patch document applies to. + public interface IPatchOperation : IPatchOperationApplied, IPatchOperationApplying + { + /// + /// The operation to perform. + /// + Operation Operation { get; } + + /// + /// The JSON path to apply on the model for this operation. + /// + string Path { get; } + + /// + /// The JSON path split into segments + /// + IEnumerable EvaluatedPath { get; } + + /// + /// The path to copy/move from, applies only to the Copy/Move operation. + /// + string From { get; } + + /// + /// The value to set with this patch operation. Only applies to + /// Add/Replace/Test. + /// + /// The strongly (best effort) typed representation of the value. + object Value { get; } + + /// + /// Applies the operation to the target object. + /// + /// The object to have the operation applied to. + void Apply(TModel target); + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchOperationApplied.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchOperationApplied.cs new file mode 100644 index 00000000000..ad8926e832d --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchOperationApplied.cs @@ -0,0 +1,15 @@ +namespace GitHub.Services.WebApi.Patch +{ + /// + /// Event for when a patch operation has been applied. + /// + public interface IPatchOperationApplied + { + event PatchOperationAppliedEventHandler PatchOperationApplied; + } + + /// + /// Event handler for patch operation applied. + /// + public delegate void PatchOperationAppliedEventHandler(object sender, PatchOperationAppliedEventArgs e); +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchOperationApplying.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchOperationApplying.cs new file mode 100644 index 00000000000..fd41c848842 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/IPatchOperationApplying.cs @@ -0,0 +1,15 @@ +namespace GitHub.Services.WebApi.Patch +{ + /// + /// Event for when a patch operation is about to be applied + /// + public interface IPatchOperationApplying + { + event PatchOperationApplyingEventHandler PatchOperationApplying; + } + + /// + /// Event handler for patch operation applying. 
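A sketch of the applying/applied notifications these interfaces expose, assuming the non-generic delegate and event-args shapes shown in this patch and a hypothetical Doc model; handlers fire around each Apply call on a patch operation.

```csharp
using System;
using System.Collections.Generic;
using GitHub.Services.WebApi.Patch;

class PatchEventSketch
{
    class Doc { public List<string> Tags { get; set; } = new List<string>(); }

    static void Main()
    {
        var op = new AddPatchOperation<Doc>("/Tags/-", "release");

        // PatchOperationApplying fires just before the change, PatchOperationApplied just after.
        op.PatchOperationApplying += (sender, e) =>
            Console.WriteLine($"applying {e.Operation} at {string.Join("/", e.Path)}");
        op.PatchOperationApplied += (sender, e) =>
            Console.WriteLine($"applied {e.Operation}");

        op.Apply(new Doc());   // prints "applying Add at /Tags/-" then "applied Add"
    }
}
```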
+ /// + public delegate void PatchOperationApplyingEventHandler(object sender, PatchOperationApplyingEventArgs e); +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/Json/JsonPatchDocument.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/Json/JsonPatchDocument.cs new file mode 100644 index 00000000000..02166aface3 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/Json/JsonPatchDocument.cs @@ -0,0 +1,13 @@ +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Services.WebApi.Patch.Json +{ + /// + /// The JSON model for JSON Patch Operations + /// + [ClientIncludeModel] + public class JsonPatchDocument : List + { + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/Json/JsonPatchOperation.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/Json/JsonPatchOperation.cs new file mode 100644 index 00000000000..08cf4516a5b --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/Json/JsonPatchOperation.cs @@ -0,0 +1,38 @@ +using System.Runtime.Serialization; + +namespace GitHub.Services.WebApi.Patch.Json +{ + /// + /// The JSON model for a JSON Patch operation + /// + [DataContract] + public class JsonPatchOperation + { + /// + /// The patch operation + /// + [DataMember(Name = "op", IsRequired = true)] + public Operation Operation { get; set; } + + /// + /// The path for the operation. + /// In the case of an array, a zero based index can be used to specify the position in the array (e.g. /biscuits/0/name). The "-" character can be used instead of an index to insert at the end of the array (e.g. /biscuits/-). + /// + [DataMember(IsRequired = true)] + public string Path { get; set; } + + /// + /// The path to copy from for the Move/Copy operation. + /// + /// + [DataMember] + public string From { get; set; } + + /// + /// The value for the operation. + /// This is either a primitive or a JToken. + /// + [DataMember] + public object Value { get; set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/ObjectDictionaryConverter.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/ObjectDictionaryConverter.cs new file mode 100644 index 00000000000..fec6cb8b993 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/ObjectDictionaryConverter.cs @@ -0,0 +1,36 @@ +using System; +using System.Collections.Generic; +using Newtonsoft.Json; +using Newtonsoft.Json.Converters; + +namespace GitHub.Services.WebApi.Patch +{ + internal class ObjectDictionaryConverter : CustomCreationConverter> + { + public override IDictionary Create(Type objectType) + { + return new Dictionary(); + } + + public override bool CanConvert(Type objectType) + { + // in addition to handling IDictionary + // we want to handle the deserialization of dict value + // which is of type object + return objectType == typeof(object) || base.CanConvert(objectType); + } + + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + if (reader.TokenType == JsonToken.StartObject || + reader.TokenType == JsonToken.Null) + { + return base.ReadJson(reader, objectType, existingValue, serializer); + } + + // if the next token is not an object + // then fall back on standard deserializer (strings, numbers etc.) 
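A brief sketch of building the wire model defined by JsonPatchDocument and JsonPatchOperation. The paths and values are illustrative (they mirror the "/biscuits" example in the Path doc comment); how the document is serialized depends on the client's serializer settings.

```csharp
using GitHub.Services.WebApi.Patch;
using GitHub.Services.WebApi.Patch.Json;

class JsonPatchDocumentSketch
{
    static JsonPatchDocument Build()
    {
        // The wire model is simply a list of { op, path, from, value } entries.
        return new JsonPatchDocument
        {
            new JsonPatchOperation { Operation = Operation.Add,     Path = "/biscuits/-", Value = "shortbread" },
            new JsonPatchOperation { Operation = Operation.Replace, Path = "/biscuits/0", Value = "hobnob" },
            new JsonPatchOperation { Operation = Operation.Remove,  Path = "/biscuits/1" }
        };
    }
}
```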
+ return serializer.Deserialize(reader); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/Operation.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/Operation.cs new file mode 100644 index 00000000000..10883914ae2 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/Operation.cs @@ -0,0 +1,14 @@ +namespace GitHub.Services.WebApi.Patch +{ + // See RFC 6902 - JSON Patch for more details. + // http://www.faqs.org/rfcs/rfc6902.html + public enum Operation + { + Add, + Remove, + Replace, + Move, + Copy, + Test + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/PatchOperation.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/PatchOperation.cs new file mode 100644 index 00000000000..433100c77d9 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/PatchOperation.cs @@ -0,0 +1,400 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using GitHub.Services.Common; +using GitHub.Services.WebApi.Patch.Json; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.Services.WebApi.Patch +{ + /// + /// PatchOperation represents a single JSON Patch operation. + /// + /// The model to validate and apply the patch operation against. + public abstract class PatchOperation : IPatchOperation + { + /// + /// The JSON Patch representation of insertion at the end of a list. + /// + public const string EndOfIndex = "-"; + + /// + /// The JSON Patch path separator. + /// + public const string PathSeparator = "/"; + + /// + /// The serializer that handles the object dictionary case. + /// + private static JsonSerializer serializer; + + /// + /// The path split into a string IEnumerable. + /// + private IEnumerable evaluatedPath; + + /// + /// Static constructor to create the serializer once with the + /// ObjectDictionaryConverter which converts JObject to dictionary + /// when the underlying type of the target is an object. + /// + static PatchOperation() + { + serializer = new JsonSerializer(); + serializer.Converters.Add(new ObjectDictionaryConverter()); + } + + /// + /// Event fired before applying a patch operation. + /// + public event PatchOperationApplyingEventHandler PatchOperationApplying; + + /// + /// Event fired after a patch operation has been applied. + /// + public event PatchOperationAppliedEventHandler PatchOperationApplied; + + /// + /// The operation to perform. + /// + public Operation Operation { get; protected set; } + + /// + /// The JSON path to apply on the model for this operation. + /// + public string Path { get; protected set; } + + /// + /// The path to apply that has been converted to an IEnumerable. + /// + public IEnumerable EvaluatedPath + { + get + { + if (this.evaluatedPath == null && this.Path != null) + { + this.evaluatedPath = SplitPath(this.Path); + } + + return this.evaluatedPath; + } + } + + /// + /// The path to copy/move from, applies only to the Copy/Move operation. + /// + public string From { get; protected set; } + + /// + /// The value to set with this patch operation. Only applies to + /// Add/Replace/Test. + /// + /// The strongly (best effort) typed representation of the value. + public object Value { get; protected set; } + + /// + /// Applies the operation to the target object. + /// + /// The object to have the operation applied to. + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract void Apply(TModel target); + + /// + /// Creates the strongly typed PatchOperation from the json patch operation provided. 
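A sketch of the CreateFromJson factory documented here: it validates the path against the model type and returns the matching strongly typed operation. The Doc model is hypothetical, the generic form PatchOperation&lt;TModel&gt; is reconstructed from the typeparam docs, and path-to-member resolution is assumed to use the Common reflection helpers.

```csharp
using System;
using System.Collections.Generic;
using GitHub.Services.WebApi.Patch;
using GitHub.Services.WebApi.Patch.Json;

class CreateFromJsonSketch
{
    class Doc { public List<string> Tags { get; set; } = new List<string>(); }

    static void Main()
    {
        var json = new JsonPatchOperation { Operation = Operation.Add, Path = "/Tags/-", Value = "v1" };

        // Validates "/Tags/-" against Doc and returns an AddPatchOperation<Doc>.
        PatchOperation<Doc> op = PatchOperation<Doc>.CreateFromJson(json);

        var doc = new Doc();
        op.Apply(doc);
        Console.WriteLine(doc.Tags[0]);   // v1

        // Move and Copy are declared in the Operation enum but not implemented here;
        // CreateFromJson throws PatchOperationFailedException for them.
    }
}
```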
+ /// + /// The json patch operation. + /// The strongly typed patch operation. + [EditorBrowsable(EditorBrowsableState.Never)] + public static PatchOperation CreateFromJson(JsonPatchOperation operation) + { + if (operation != null) + { + switch (operation.Operation) + { + case Operation.Add: + return AddPatchOperation.CreateFromJson(operation); + case Operation.Replace: + return ReplacePatchOperation.CreateFromJson(operation); + case Operation.Test: + return TestPatchOperation.CreateFromJson(operation); + case Operation.Remove: + return RemovePatchOperation.CreateFromJson(operation); + default: + throw new PatchOperationFailedException(PatchResources.MoveCopyNotImplemented()); + } + } + + throw new VssPropertyValidationException("Operation", PatchResources.InvalidOperation()); + } + + /// + /// Validates the path for the operation. + /// + protected static void ValidatePath(JsonPatchOperation operation) + { + // Path cannot be null, but it can be empty. + if (operation.Path == null) + { + throw new VssPropertyValidationException("Path", PatchResources.PathCannotBeNull()); + } + + // If it is not empty and does not start with /, this is an error per RFC. + if (!operation.Path.StartsWith(PathSeparator) && !string.IsNullOrEmpty(operation.Path)) + { + throw new VssPropertyValidationException("Path", PatchResources.PathInvalidStartValue()); + } + + // Ending in / is not valid.. + if (operation.Path.EndsWith(PathSeparator)) + { + throw new VssPropertyValidationException("Path", PatchResources.PathInvalidEndValue()); + } + + // Only add operations allow insert. + if (operation.Operation != Operation.Add) + { + if (operation.Path.EndsWith(EndOfIndex)) + { + throw new VssPropertyValidationException("Path", PatchResources.InsertNotSupported(operation.Operation)); + } + } + } + + /// + /// Validates the type for the operation. + /// + protected static void ValidateType(JsonPatchOperation operation) + { + ValidateAndGetType(operation); + } + + /// + /// Validates and returns the type for the operation. + /// + /// + /// + protected static Type ValidateAndGetType(JsonPatchOperation operation) + { + var type = GetType(typeof(TModel), operation.Path); + if (type == null) + { + throw new VssPropertyValidationException("Path", PatchResources.UnableToEvaluatePath(operation.Path)); + } + + return type; + } + + /// + /// Validates the path evaluates to a property on the model, and + /// returns the strongly typed value for the model. + /// + protected static object ValidateAndGetValue(JsonPatchOperation operation) + { + var type = ValidateAndGetType(operation); + + object value; + if (operation.Value == null) + { + value = null; + } + else + { + value = DeserializeValue(type, operation.Value); + } + + return value; + } + + /// + /// Gets The type of the field the path maps to. + /// + /// The type of the parent object. + /// The path to evaluate. + /// The type of the field that path maps to. + private static Type GetType(Type type, string path) + { + return GetType(type, SplitPath(path)); + } + + /// + /// Gets The type of the field the path maps to. + /// + /// The type of the parent object. + /// The path enumeration to evaluate. + /// The type of the field that path maps to. + private static Type GetType(Type type, IEnumerable path) + { + var current = path.First(); + Type currentType = null; + + // The start of the path should always be an empty string after splitting. 
+ if (string.IsNullOrEmpty(current)) + { + currentType = type; + } + else if (type.IsList()) + { + currentType = type.GenericTypeArguments[0]; + } + else if (type.IsDictionary()) + { + currentType = type.GenericTypeArguments[1]; + } + else + { + currentType = type.GetMemberType(current); + } + + // Couldn't map the type, return null and let consumer handle. + if (currentType == null) + { + return null; + } + // The end of the list, this must be the type we're looking for. + else if (path.Count() == 1) + { + return currentType; + } + else + { + return GetType(currentType, path.Skip(1)); + } + } + + /// + /// Deserializes the json value. + /// + /// + /// The json formatted value. + /// The strongly typed (best effort) value. + private static object DeserializeValue(Type type, object jsonValue) + { + object value = null; + if (jsonValue is JToken) + { + try + { + value = ((JToken)jsonValue).ToObject(type, serializer); + } + catch (JsonException ex) + { + throw new VssPropertyValidationException("Value", PatchResources.InvalidValue(jsonValue, type), ex); + } + } + else + { + // Not a JToken, so it must be a primitive type. Will + // attempt to convert to the requested type. + if (type.IsAssignableOrConvertibleFrom(jsonValue)) + { + value = ConvertUtility.ChangeType(jsonValue, type); + } + else + { + Guid guidValue; + if (Guid.TryParse((string)jsonValue, out guidValue)) + { + value = guidValue; + } + else + { + throw new VssPropertyValidationException("Value", PatchResources.InvalidValue(jsonValue, type)); + } + } + } + + return value; + } + + /// + /// Converts the string path into the evaluatable path. + /// + private static IEnumerable SplitPath(string path) + { + return path.Split(new[] { PathSeparator }, StringSplitOptions.None); + } + + /// + /// Evaluates the path on the target and applies an action to the result. + /// + /// The target object to apply the operation to. + /// The action to apply to the result of the evaluation. + protected void Apply(object target, Action actionToApply) + { + this.Apply(target, this.EvaluatedPath, actionToApply); + } + + /// + /// Evaluates the path on the target and applies an action to the result. + /// + /// The target object to apply the operation to. + /// The path to evaluate. + /// The action to apply to the result of the evaluation. + private void Apply(object target, IEnumerable path, Action actionToApply) + { + var current = path.First(); + var type = target.GetType(); + + // We're at the end, time to apply the action. + if (path.Count() == 1) + { + if (PatchOperationApplying != null) + { + PatchOperationApplying(this, new PatchOperationApplyingEventArgs(this.EvaluatedPath, this.Operation)); + } + + actionToApply(type, target, current); + + if (PatchOperationApplied != null) + { + PatchOperationApplied(this, new PatchOperationAppliedEventArgs(this.EvaluatedPath, this.Operation)); + } + } + else + { + object newTarget = null; + + // The start of the path should always be an empty string after splitting. + // We just assign target to new target and move down the path. + if (string.IsNullOrEmpty(current)) + { + newTarget = target; + } + // If the next level is a dictionary, we want to get object at the key. 
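To make the path walking in GetType and Apply concrete, here is a hypothetical model with the types each segment resolves to; the model and paths are assumptions used only for illustration.

```csharp
using System.Collections.Generic;

// Hypothetical model for tracing how SplitPath/GetType walk a JSON Patch path.
class Order
{
    public List<OrderLine> Lines { get; set; }
    public Dictionary<string, string> Fields { get; set; }
}

class OrderLine
{
    public string Sku { get; set; }
}

// "/Lines/0/Sku"  -> "" (Order) -> "Lines" (List<OrderLine>) -> "0" (OrderLine) -> "Sku" (string)
// "/Fields/color" -> "" (Order) -> "Fields" (Dictionary<string,string>) -> "color" (string)
// For lists the element type is GenericTypeArguments[0], for dictionaries the value type is
// GenericTypeArguments[1]; any other segment resolves through the GetMemberType helper.
```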
+ else if (type.IsDictionary()) + { + var dictionary = ((IDictionary)target); + if (dictionary.Contains(current)) + { + newTarget = dictionary[current]; + } + } + else if (type.IsList()) + { + var list = (IList)target; + int index; + if (int.TryParse(current, out index) && + list.Count > index) + { + newTarget = ((IList)target)[index]; + } + } + else + { + newTarget = type.GetMemberValue(current, target); + } + + if (newTarget == null) + { + // An extra layer of protection, since this should never happen because the earlier call to GetType would have failed. + throw new PatchOperationFailedException(PatchResources.TargetCannotBeNull()); + } + + this.Apply(newTarget, path.Skip(1), actionToApply); + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/PatchOperationAppliedEventArgs.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/PatchOperationAppliedEventArgs.cs new file mode 100644 index 00000000000..b647e7ad271 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/PatchOperationAppliedEventArgs.cs @@ -0,0 +1,26 @@ +using System.Collections.Generic; + +namespace GitHub.Services.WebApi.Patch +{ + /// + /// Event args for the applied patch operation. + /// + public class PatchOperationAppliedEventArgs + { + public PatchOperationAppliedEventArgs(IEnumerable path, Operation operation) + { + this.Path = path; + this.Operation = operation; + } + + /// + /// The current path. + /// + public IEnumerable Path { get; private set; } + + /// + /// The operation being applied. + /// + public Operation Operation { get; private set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/PatchOperationApplyingEventArgs.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/PatchOperationApplyingEventArgs.cs new file mode 100644 index 00000000000..584bf821898 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/PatchOperationApplyingEventArgs.cs @@ -0,0 +1,26 @@ +using System.Collections.Generic; + +namespace GitHub.Services.WebApi.Patch +{ + /// + /// Event args for the applying patch operation. + /// + public class PatchOperationApplyingEventArgs + { + public PatchOperationApplyingEventArgs(IEnumerable path, Operation operation) + { + this.Path = path; + this.Operation = operation; + } + + /// + /// The current path. + /// + public IEnumerable Path { get; private set; } + + /// + /// The operation about to be applied. + /// + public Operation Operation { get; private set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/RemovePatchOperation.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/RemovePatchOperation.cs new file mode 100644 index 00000000000..051613176b7 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/RemovePatchOperation.cs @@ -0,0 +1,79 @@ +using System.Collections; +using System.ComponentModel; +using GitHub.Services.Common; +using GitHub.Services.WebApi.Patch.Json; + +namespace GitHub.Services.WebApi.Patch +{ + /// + /// Represents the JSON Patch Remove operation. + /// + /// The model the patch operation applies to. + public class RemovePatchOperation : PatchOperation + { + public RemovePatchOperation() + { + this.Operation = Operation.Remove; + } + + public RemovePatchOperation(string path) : this() + { + this.Path = path; + } + + /// + /// Creates the strongly typed PatchOperation and validates the operation. + /// + /// The simple json patch operation model. + /// A valid and strongly typed PatchOperation. 
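A usage sketch of the Remove operation defined here, against a hypothetical Doc model (names and values are assumptions); it shows list, dictionary, and out-of-range behavior.

```csharp
using System;
using System.Collections.Generic;
using GitHub.Services.WebApi.Patch;

class RemovePatchSketch
{
    class Doc
    {
        public List<string> Tags { get; set; } = new List<string> { "alpha", "beta" };
        public Dictionary<string, string> Fields { get; set; } =
            new Dictionary<string, string> { ["color"] = "red" };
    }

    static void Main()
    {
        var doc = new Doc();

        // Removing a list element shifts the remaining items down.
        new RemovePatchOperation<Doc>("/Tags/0").Apply(doc);
        Console.WriteLine(string.Join(",", doc.Tags));      // beta

        // Removing a dictionary entry deletes the key.
        new RemovePatchOperation<Doc>("/Fields/color").Apply(doc);
        Console.WriteLine(doc.Fields.Count);                // 0

        // An out-of-range index surfaces as PatchOperationFailedException.
        try { new RemovePatchOperation<Doc>("/Tags/5").Apply(doc); }
        catch (PatchOperationFailedException) { Console.WriteLine("index out of range"); }
    }
}
```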
+ [EditorBrowsable(EditorBrowsableState.Never)] + public static new PatchOperation CreateFromJson(JsonPatchOperation operation) + { + ValidatePath(operation); + ValidateType(operation); + + if (operation.Value != null) + { + throw new VssPropertyValidationException("Value", PatchResources.ValueNotNull()); + } + + return new RemovePatchOperation(operation.Path); + } + + /// + /// Applies the Remove patch operation to the target + /// + /// The object to apply the operation to. + public override void Apply(TModel target) + { + this.Apply( + target, + (type, parent, current) => + { + if (type.IsList()) + { + var list = (IList)parent; + int index; + if (int.TryParse(current, out index) && + list.Count > index) + { + list.RemoveAt(index); + } + else + { + // We can't remove outside the rangeof the list. + throw new PatchOperationFailedException(PatchResources.IndexOutOfRange(this.Path)); + } + } + else if (type.IsDictionary()) + { + ((IDictionary)parent).Remove(current); + } + else + { + type.SetMemberValue(current, parent, this.Value); + } + }); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/ReplacePatchOperation.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/ReplacePatchOperation.cs new file mode 100644 index 00000000000..dda4e91b13e --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/ReplacePatchOperation.cs @@ -0,0 +1,90 @@ +using System.Collections; +using System.ComponentModel; +using GitHub.Services.Common; +using GitHub.Services.WebApi.Patch.Json; + +namespace GitHub.Services.WebApi.Patch +{ + /// + /// Represents the JSON Patch Replace operation. + /// + /// The model the patch operation applies to. + public class ReplacePatchOperation : PatchOperation + { + public ReplacePatchOperation() + { + this.Operation = Operation.Replace; + } + + public ReplacePatchOperation(string path, object value): this() + { + this.Path = path; + this.Value = value; + } + + /// + /// Creates the strongly typed PatchOperation and validates the operation. + /// + /// The simple json patch operation model. + /// A valid and strongly typed PatchOperation. + [EditorBrowsable(EditorBrowsableState.Never)] + public static new PatchOperation CreateFromJson(JsonPatchOperation operation) + { + ValidatePath(operation); + + var value = ValidateAndGetValue(operation); + if (value == null) + { + throw new VssPropertyValidationException("Value", PatchResources.ValueCannotBeNull()); + } + + return new ReplacePatchOperation(operation.Path, value); + } + + /// + /// Applies the Replace patch operation to the target + /// + /// The object to apply the operation to. 
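A usage sketch for the Replace operation, again with a hypothetical Doc model: unlike Add, Replace only overwrites values that already exist, and a missing dictionary key is reported as InvalidPatchFieldNameException.

```csharp
using System;
using System.Collections.Generic;
using GitHub.Services.WebApi.Patch;

class ReplacePatchSketch
{
    class Doc
    {
        public string Title { get; set; } = "draft";
        public Dictionary<string, string> Fields { get; set; } = new Dictionary<string, string>();
    }

    static void Main()
    {
        var doc = new Doc();

        // Replace overwrites an existing value...
        new ReplacePatchOperation<Doc>("/Title", "final").Apply(doc);
        Console.WriteLine(doc.Title);   // final

        // ...but refuses to create one that is not already there.
        try { new ReplacePatchOperation<Doc>("/Fields/color", "red").Apply(doc); }
        catch (InvalidPatchFieldNameException)
        {
            Console.WriteLine("cannot replace a field that does not exist");
        }
    }
}
```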
+ public override void Apply(TModel target) + { + this.Apply( + target, + (type, parent, current) => + { + if (type.IsList()) + { + var list = (IList)parent; + int index; + if (int.TryParse(current, out index) && + list.Count > index) + { + list[index] = this.Value; + } + else + { + throw new PatchOperationFailedException(PatchResources.CannotReplaceNonExistantValue(this.Path)); + } + } + else if (type.IsDictionary()) + { + var dictionary = (IDictionary)parent; + if (!dictionary.Contains(current)) + { + throw new InvalidPatchFieldNameException(PatchResources.InvalidFieldName(current)); + } + + dictionary[current] = this.Value; + } + else + { + if (type.GetMemberValue(current, parent) == null) + { + throw new PatchOperationFailedException(PatchResources.CannotReplaceNonExistantValue(this.Path)); + } + + type.SetMemberValue(current, parent, this.Value); + } + }); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Patch/TestPatchOperation.cs b/src/Sdk/WebApi/WebApi/Contracts/Patch/TestPatchOperation.cs new file mode 100644 index 00000000000..832ccf8efe6 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Patch/TestPatchOperation.cs @@ -0,0 +1,115 @@ +using System.Collections; +using GitHub.Services.Common; +using GitHub.Services.WebApi.Patch.Json; + +namespace GitHub.Services.WebApi.Patch +{ + /// + /// Represents the JSON Patch Test operation. + /// + /// The model the patch operation applies to. + public class TestPatchOperation : PatchOperation + { + public TestPatchOperation() + { + this.Operation = Operation.Test; + } + + public TestPatchOperation(string path, object value): this() + { + this.Path = path; + this.Value = value; + } + + /// + /// Creates the strongly typed PatchOperation and validates the operation. + /// + /// The simple json patch operation model. + /// A valid and strongly typed PatchOperation. + public static new PatchOperation CreateFromJson(JsonPatchOperation operation) + { + ValidatePath(operation); + + return new TestPatchOperation(operation.Path, ValidateAndGetValue(operation)); + } + + /// + /// Applies the Test patch operation to the target + /// + /// The object to apply the operation to. + public override void Apply(TModel target) + { + this.Apply( + target, + (type, parent, current) => + { + object memberValue = null; + if (type.IsList()) + { + var list = (IList)parent; + int index; + if (int.TryParse(current, out index) && + list.Count > index) + { + memberValue = list[index]; + } + else + { + // We can't insert beyond the length of the list. + throw new PatchOperationFailedException(PatchResources.IndexOutOfRange(this.Path)); + } + } + else if (type.IsDictionary()) + { + var fieldDictionary = ((IDictionary)parent); + + if (!fieldDictionary.Contains(current)) + { + throw new InvalidPatchFieldNameException(PatchResources.InvalidFieldName(current)); + } + memberValue = fieldDictionary[current]; + } + else + { + memberValue = type.GetMemberValue(current, parent); + } + + var success = false; + if (memberValue != null) + { + if (memberValue is IList) + { + // TODO: Implement + throw new PatchOperationFailedException(PatchResources.TestNotImplementedForList()); + } + else if (memberValue is IDictionary) + { + // TODO: Implement + throw new PatchOperationFailedException(PatchResources.TestNotImplementedForDictionary()); + } + else if (memberValue.GetType().IsAssignableOrConvertibleFrom(this.Value)) + { + // We convert the objects since we need the values unboxed. 
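A sketch of how the Test operation is typically used to make the rest of a patch conditional. The Doc model and revision values are assumptions, and the value comparison relies on the IsAssignableOrConvertibleFrom/ConvertUtility helpers from GitHub.Services.Common behaving as their names suggest.

```csharp
using System;
using GitHub.Services.WebApi.Patch;

class TestPatchSketch
{
    class Doc { public int Revision { get; set; } = 3; }

    static void Main()
    {
        var doc = new Doc();

        // A matching value applies silently and changes no state...
        new TestPatchOperation<Doc>("/Revision", 3).Apply(doc);
        Console.WriteLine("revision matches");

        // ...while a mismatch surfaces as TestPatchOperationFailedException.
        try { new TestPatchOperation<Doc>("/Revision", 4).Apply(doc); }
        catch (TestPatchOperationFailedException) { Console.WriteLine("revision mismatch"); }
    }
}
```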
+ var convertedMemberValue = ConvertUtility.ChangeType(memberValue, memberValue.GetType()); + var convertedValue = ConvertUtility.ChangeType(this.Value, memberValue.GetType()); + + success = convertedMemberValue.Equals(convertedValue); + } + else + { + success = memberValue.Equals(this.Value); + } + } + else + { + success = object.Equals(memberValue, this.Value); + } + + if (!success) + { + throw new TestPatchOperationFailedException(PatchResources.TestFailed(this.Path, memberValue, this.Value)); + } + }); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Profile/AttributeDescriptor.cs b/src/Sdk/WebApi/WebApi/Contracts/Profile/AttributeDescriptor.cs new file mode 100644 index 00000000000..62c63b8187a --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Profile/AttributeDescriptor.cs @@ -0,0 +1,139 @@ +using GitHub.Services.Common; +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; + +namespace GitHub.Services.Profile +{ + /// + /// Identifies an attribute with a name and a container. + /// + public class AttributeDescriptor : IComparable, ICloneable + { + /// + /// Constructor + /// + public AttributeDescriptor(string containerName, string attributeName) + { + //Validation in setters... + AttributeName = attributeName; + ContainerName = containerName; + } + + /// + /// The name of the attribute. + /// + [DataMember(IsRequired = true, EmitDefaultValue = false)] + public string AttributeName + { + get + { + return m_attributeName; + } + set + { + ProfileArgumentValidation.ValidateAttributeName(value); + m_attributeName = value; + } + } + + /// + /// The container the attribute resides in. + /// + [DataMember(IsRequired = true, EmitDefaultValue = false)] + public string ContainerName + { + get + { + return m_containerName; + } + set + { + ProfileArgumentValidation.ValidateContainerName(value); + m_containerName = value; + } + } + + private string m_attributeName; + private string m_containerName; + + public int CompareTo(AttributeDescriptor obj) + { + if (this == obj) return 0; + if (obj == null) return 1; + + int retValue; + if ((retValue = VssStringComparer.AttributesDescriptor.Compare(this.AttributeName, obj.AttributeName)) != 0) + { + return retValue; + } + + return VssStringComparer.AttributesDescriptor.Compare(this.ContainerName, obj.ContainerName); + } + + public override bool Equals(object obj) + { + if (obj == null || GetType() != obj.GetType()) + { + return false; + } + + return CompareTo((AttributeDescriptor) obj) == 0; + } + + public override int GetHashCode() + { + return this.ContainerName.GetHashCode() + this.AttributeName.GetHashCode(); + } + + public object Clone() + { + return new AttributeDescriptor(ContainerName, AttributeName); + } + + public override string ToString() + { + return string.Concat(ContainerName,";",AttributeName); + } + } + + /// + /// Class used for comparing AttributeDescriptors + /// + public class AttributeDescriptorComparer : IComparer, IEqualityComparer + { + private AttributeDescriptorComparer() { } + + public int Compare(AttributeDescriptor x, AttributeDescriptor y) + { + if (x == y) return 0; + if (x == null && y != null) return -1; + if (x != null && y == null) return 1; + + return (x.CompareTo(y)); + } + + public bool Equals(AttributeDescriptor x, AttributeDescriptor y) + { + return Compare(x, y) == 0; + } + + public int GetHashCode(AttributeDescriptor obj) + { + return obj.GetHashCode(); + } + + public static AttributeDescriptorComparer Instance + { + get + { + return s_instance; + } + } + + private 
static AttributeDescriptorComparer s_instance = new AttributeDescriptorComparer(); + } + + +} + diff --git a/src/Sdk/WebApi/WebApi/Contracts/Profile/AttributesContainer.cs b/src/Sdk/WebApi/WebApi/Contracts/Profile/AttributesContainer.cs new file mode 100644 index 00000000000..07c03beab5d --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Profile/AttributesContainer.cs @@ -0,0 +1,65 @@ +using GitHub.Services.Common; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; + +namespace GitHub.Services.Profile +{ + /// + /// Stores a set of named profile attributes. + /// + [DataContract] + public class AttributesContainer : IVersioned, ICloneable + { + public AttributesContainer(string containerName) : this() + { + ContainerName = containerName; + } + + public AttributesContainer() + { + Attributes = new Dictionary(VssStringComparer.AttributesDescriptor); + } + + /// + /// The name of the container. + /// + [DataMember(IsRequired = true, EmitDefaultValue = false)] + public string ContainerName { + get + { + return m_containerName; + } + set + { + ProfileArgumentValidation.ValidateContainerName(value); + m_containerName = value; + } + } + + public object Clone() + { + AttributesContainer newContainer = (AttributesContainer)MemberwiseClone(); + + // Deep copy of attributes dictionary + newContainer.Attributes = Attributes != null ? Attributes.ToDictionary(x => x.Key, x => (ProfileAttribute)x.Value.Clone()) : null; + + return newContainer; + } + + /// + /// The attributes stored by the container. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public IDictionary Attributes { get; set; } + + /// + /// The maximum revision number of any attribute within the container. + /// + [DataMember(IsRequired = true, EmitDefaultValue = false)] + public int Revision { get; set; } + + private string m_containerName; + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Profile/AttributesQueryContext.cs b/src/Sdk/WebApi/WebApi/Contracts/Profile/AttributesQueryContext.cs new file mode 100644 index 00000000000..94db2400a20 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Profile/AttributesQueryContext.cs @@ -0,0 +1,135 @@ +using GitHub.Services.Common; +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.Profile +{ + public class AttributesQueryContext : ICloneable + { + public AttributesQueryContext( + AttributesScope scope, + DateTimeOffset? modifiedSince = null, + int? modifiedAfterRevision = null, + CoreProfileAttributes? coreAttributes = null, + string containerName = null) + { + if (scope.HasFlag(~(AttributesScope.Application | AttributesScope.Core)) + || (!scope.HasFlag(AttributesScope.Application) && !scope.HasFlag(AttributesScope.Core))) + { + throw new ArgumentException(string.Format("The scope '{0}' is not supported for this operation.", scope)); + } + + Scope = scope; + ModifiedSince = modifiedSince; + ModifiedAfterRevision = modifiedAfterRevision; + + if (scope.HasFlag(AttributesScope.Application)) + { + ProfileArgumentValidation.ValidateApplicationContainerName(containerName); + ContainerName = containerName; + } + else + { + ContainerName = null; + } + + if (scope.HasFlag(AttributesScope.Core)) + { + CoreAttributes = coreAttributes ?? 
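+                // No explicit selection for the Core scope means "return every core attribute":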
CoreProfileAttributes.All; + } + else + { + CoreAttributes = null; + } + } + + public AttributesQueryContext(AttributesScope scope, string containerName) + : this(scope, null, null, CoreProfileAttributes.All, containerName) + { + } + + /// + /// Deprecated constructor. The operation to 'get attributes since a certain point in time' is now deprecated. + /// > + public AttributesQueryContext(AttributesScope scope, DateTimeOffset modifiedSince, string containerName = null) + : this(scope, modifiedSince, null, CoreProfileAttributes.All, containerName) + { + } + + public AttributesQueryContext(AttributesScope scope, int modifiedAfterRevision, string containerName = null) + : this(scope, null, modifiedAfterRevision, CoreProfileAttributes.All, containerName) + { + } + + [DataMember(IsRequired = true)] + public AttributesScope Scope { get; private set; } + + [DataMember] + public string ContainerName { get; private set; } + + [DataMember] + public DateTimeOffset? ModifiedSince { get; private set; } + + [DataMember] + public int? ModifiedAfterRevision { get; private set; } + + [DataMember] + public CoreProfileAttributes? CoreAttributes { get; private set; } + + public override bool Equals(object obj) + { + if (obj == null || GetType() != obj.GetType()) + { + return false; + } + + var other = obj as AttributesQueryContext; + + return this.Equals(other); + } + + public bool Equals(AttributesQueryContext other) + { + return (Scope == other.Scope && + VssStringComparer.AttributesDescriptor.Equals(ContainerName, other.ContainerName) && + ModifiedSince == other.ModifiedSince && + ModifiedAfterRevision == other.ModifiedAfterRevision); + } + + public override int GetHashCode() + { + int hashCode = Scope.GetHashCode(); + hashCode = (hashCode * 499) ^ (ContainerName != null ? ContainerName.ToLowerInvariant().GetHashCode() : 0); + hashCode = (hashCode * 499) ^ (ModifiedSince != null ? ModifiedSince.GetHashCode() : 0); + hashCode = (hashCode * 499) ^ (ModifiedAfterRevision != null ? ModifiedAfterRevision.GetHashCode() : 0); + hashCode = (hashCode * 499) ^ (CoreAttributes != null ? CoreAttributes.GetHashCode() : 0); + + return hashCode; + } + + public object Clone() + { + return MemberwiseClone(); + } + } + + /// + /// Used to specify the scope of a set of attributes. + /// + /// + /// A profile attribute is either a core attribute or an attribute beloging to some application container. + /// A core attribute belongs to scope AttributesScope.Core. + /// An attribute stored under some application container belongs to scope AttributesScope.Application. + /// An attribute always belongs to scope AttributesScope.Core | AttributesScope.Application. 
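+    /// For example, querying with AttributesScope.Core | AttributesScope.Application returns both the core
+    /// attributes and the attributes of the named application container.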
+ /// + [DataContract, Flags] + public enum AttributesScope + { + [EnumMember] + Core = 0x1, + + [EnumMember] + Application = 0x2, + } + +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Profile/Avatar.cs b/src/Sdk/WebApi/WebApi/Contracts/Profile/Avatar.cs new file mode 100644 index 00000000000..b38f1d9f6ae --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Profile/Avatar.cs @@ -0,0 +1,48 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.Profile +{ + [DataContract] + public class Avatar : ITimeStamped, ICloneable + { + [DataMember] + public byte[] Value { get; set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public DateTimeOffset TimeStamp { get; set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public AvatarSize Size { get; internal set; } + + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public bool? IsAutoGenerated { get; internal set; } + + public object Clone() + { + var clone = new Avatar() + { + Size = this.Size, + TimeStamp = this.TimeStamp, + IsAutoGenerated = this.IsAutoGenerated + }; + if (Value != null) + { + clone.Value = new byte[Value.Length]; + Buffer.BlockCopy(Value, 0, clone.Value, 0, Value.Length); + } + return clone; + } + } + + /// + /// Small = 34 x 34 pixels; Medium = 44 x 44 pixels; Large = 220 x 220 pixels + /// + [DataContract] + public enum AvatarSize + { + Small, + Medium, + Large, + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Profile/CoreProfileAttribute.cs b/src/Sdk/WebApi/WebApi/Contracts/Profile/CoreProfileAttribute.cs new file mode 100644 index 00000000000..2fcbcba446a --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Profile/CoreProfileAttribute.cs @@ -0,0 +1,12 @@ +using System.Runtime.Serialization; + +namespace GitHub.Services.Profile +{ + /// + /// A profile attribute which always has a value for each profile. + /// + [DataContract] + public class CoreProfileAttribute : ProfileAttributeBase + { + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Profile/ITimeStamped.cs b/src/Sdk/WebApi/WebApi/Contracts/Profile/ITimeStamped.cs new file mode 100644 index 00000000000..dd3eb87adb3 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Profile/ITimeStamped.cs @@ -0,0 +1,11 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.Profile +{ + public interface ITimeStamped + { + [DataMember(IsRequired = false, EmitDefaultValue = false)] + DateTimeOffset TimeStamp { get; set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Profile/IVersioned.cs b/src/Sdk/WebApi/WebApi/Contracts/Profile/IVersioned.cs new file mode 100644 index 00000000000..cb622d5ecda --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Profile/IVersioned.cs @@ -0,0 +1,10 @@ +using System.Runtime.Serialization; + +namespace GitHub.Services.Profile +{ + public interface IVersioned + { + [DataMember(IsRequired = false, EmitDefaultValue = false)] + int Revision { get; } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Profile/Profile.cs b/src/Sdk/WebApi/WebApi/Contracts/Profile/Profile.cs new file mode 100644 index 00000000000..7ba1e71a586 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Profile/Profile.cs @@ -0,0 +1,234 @@ +using GitHub.Services.Common; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.Serialization; + +namespace GitHub.Services.Profile +{ + /// + /// A user profile. 
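+    /// Scalar members such as DisplayName, EmailAddress and Avatar are projections over the CoreAttributes
+    /// dictionary; reads and writes go through GetAttributeFromCoreContainer/SetAttributeInCoreContainer below.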
+ /// + [DataContract] + public class Profile : ITimeStamped, IVersioned, ICloneable + { + public Profile() + { + CoreAttributes = new Dictionary(VssStringComparer.AttributesDescriptor); + } + + public string DisplayName + { + get { return GetAttributeFromCoreContainer(CoreAttributeNames.DisplayName, null); } + set { SetAttributeInCoreContainer(CoreAttributeNames.DisplayName, value); } + } + + public string PublicAlias + { + get { return GetAttributeFromCoreContainer(CoreAttributeNames.PublicAlias, null); } + set { SetAttributeInCoreContainer(CoreAttributeNames.PublicAlias, value); } + } + + public string CountryName + { + get { return GetAttributeFromCoreContainer(CoreAttributeNames.CountryName, null); } + set { SetAttributeInCoreContainer(CoreAttributeNames.CountryName, value); } + } + + public string EmailAddress + { + get { return GetAttributeFromCoreContainer(CoreAttributeNames.EmailAddress, null); } + set { SetAttributeInCoreContainer(CoreAttributeNames.EmailAddress, value); } + } + + public string UnconfirmedEmailAddress + { + get { return GetAttributeFromCoreContainer(CoreAttributeNames.UnconfirmedEmailAddress, null); } + set { SetAttributeInCoreContainer(CoreAttributeNames.UnconfirmedEmailAddress, value); } + } + + public DateTimeOffset CreatedDateTime + { + get { return GetAttributeFromCoreContainer(CoreAttributeNames.DateCreated, default(DateTimeOffset)); } + set { SetAttributeInCoreContainer(CoreAttributeNames.DateCreated, value); } + } + + public Avatar Avatar + { + get { return GetAttributeFromCoreContainer(CoreAttributeNames.Avatar, null); } + set { SetAttributeInCoreContainer(CoreAttributeNames.Avatar, value); } + } + + /// + /// The attributes of this profile. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public AttributesContainer ApplicationContainer { get; set; } + + /// + /// The core attributes of this profile. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + internal IDictionary CoreAttributes { get; set; } + + /// + /// The maximum revision number of any attribute. + /// + [DataMember(IsRequired = true, EmitDefaultValue = false)] + public int CoreRevision { get; set; } + + /// + /// The time at which this profile was last changed. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public DateTimeOffset TimeStamp { get; set; } + + /// + /// The unique identifier of the profile. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public Guid Id { get; internal set; } + + /// + /// The maximum revision number of any attribute. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public int Revision { get; set; } + + /// + /// The current state of the profile. + /// + [DataMember(IsRequired = false)] + public ProfileState ProfileState { get; set; } + + public int TermsOfServiceVersion + { + get { return GetAttributeFromCoreContainer(CoreAttributeNames.TermsOfServiceVersion, 0); } + set { SetAttributeInCoreContainer(CoreAttributeNames.TermsOfServiceVersion, value); } + } + + public DateTimeOffset TermsOfServiceAcceptDate + { + get { return GetAttributeFromCoreContainer(CoreAttributeNames.TermsOfServiceAcceptDate, default(DateTimeOffset)); } + set { SetAttributeInCoreContainer(CoreAttributeNames.TermsOfServiceAcceptDate, value); } + } + + public bool? 
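+        // ContactWithOffers reads the raw attribute (instead of using GetAttributeFromCoreContainer) so that
+        // an unset or non-boolean value surfaces as null rather than as a default bool.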
ContactWithOffers + { + get + { + CoreProfileAttribute attribute; + CoreAttributes.TryGetValue(CoreAttributeNames.ContactWithOffers, out attribute); + if (attribute != null && attribute.Value != null && attribute.Value is bool) + { + return (bool?)attribute.Value; + } + return null; + } + set { SetAttributeInCoreContainer(CoreAttributeNames.ContactWithOffers, value); } + } + + private T GetAttributeFromCoreContainer(string attributeName, T defaultValue) + { + CoreProfileAttribute attribute; + CoreAttributes.TryGetValue(attributeName, out attribute); + + if (attribute != null && attribute.Value != null && attribute.Value.GetType() == typeof(T)) + { + return (T)attribute.Value; + } + return defaultValue; + } + + private void SetAttributeInCoreContainer(string attributeName, object value) + { + CoreProfileAttribute attribute; + if (CoreAttributes.TryGetValue(attributeName, out attribute)) + { + attribute.Value = value; + } + else + { + CoreAttributes.Add(attributeName, new CoreProfileAttribute() + { + Descriptor = new AttributeDescriptor(CoreContainerName, attributeName), + Value = value, + }); + } + } + + public CoreProfileAttribute GetCoreAttribute(string attributeName) + { + CoreProfileAttribute attribute; + CoreAttributes.TryGetValue(attributeName, out attribute); + if (attribute == null) + { + return null; + } + return (CoreProfileAttribute)attribute.Clone(); + } + + public object Clone() + { + Profile newProfile = MemberwiseClone() as Profile; + + // Since core attributes are cloned on read, we can get away with a shallow copy + newProfile.CoreAttributes = CoreAttributes != null ? CoreAttributes.ToDictionary(x => x.Key, x => (CoreProfileAttribute) x.Value.Clone()) : null; + newProfile.ApplicationContainer = ApplicationContainer != null ? (AttributesContainer)ApplicationContainer.Clone() : null; + + return newProfile; + } + + internal const string CoreContainerName = "Core"; + + internal class CoreAttributeNames + { + internal const string DisplayName = "DisplayName"; + internal const string PublicAlias = "PublicAlias"; + internal const string EmailAddress = "EmailAddress"; + internal const string DefaultEmailAddress = "DefaultEmailAddress"; + internal const string UnconfirmedEmailAddress = "UnconfirmedEmailAddress"; + internal const string CountryName = "CountryName"; + internal const string Avatar = "Avatar"; + internal const string TermsOfServiceVersion = "TermsOfServiceVersion"; + internal const string TermsOfServiceAcceptDate = "TermsOfServiceAcceptDate"; + internal const string ContactWithOffers = "ContactWithOffers"; + internal const string DateCreated = "DateCreated"; + + internal static readonly List AttributeNameList = new List() + { + DisplayName, + PublicAlias, + EmailAddress, + UnconfirmedEmailAddress, + CountryName, + Avatar, + TermsOfServiceVersion, + TermsOfServiceAcceptDate, + ContactWithOffers, + DateCreated + }; + } + } + + /// + /// The state of a profile. + /// + public enum ProfileState + { + /// + /// The profile is in use. + /// + Custom = 0, + + /// + /// The profile is in use, but can only be read. + /// + CustomReadOnly = 1, + + /// + /// The profile may only be read. 
+ /// + ReadOnly = 2 + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Profile/ProfileAttribute.cs b/src/Sdk/WebApi/WebApi/Contracts/Profile/ProfileAttribute.cs new file mode 100644 index 00000000000..b62aac92ab8 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Profile/ProfileAttribute.cs @@ -0,0 +1,12 @@ +using System.Runtime.Serialization; + +namespace GitHub.Services.Profile +{ + /// + /// A named object associated with a profile. + /// + [DataContract] + public class ProfileAttribute : ProfileAttributeBase + { + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Profile/ProfileAttributeBase.cs b/src/Sdk/WebApi/WebApi/Contracts/Profile/ProfileAttributeBase.cs new file mode 100644 index 00000000000..f667c3da589 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Profile/ProfileAttributeBase.cs @@ -0,0 +1,41 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.Profile +{ + [DataContract] + public class ProfileAttributeBase : ITimeStamped, IVersioned, ICloneable + { + /// + /// The descriptor of the attribute. + /// + [DataMember] + public AttributeDescriptor Descriptor { get; set; } + + /// + /// The value of the attribute. + /// + [DataMember] + public T Value { get; set; } + + /// + /// The time the attribute was last changed. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public DateTimeOffset TimeStamp { get; set; } + + /// + /// The revision number of the attribute. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public int Revision { get; set; } + + public object Clone() + { + ProfileAttributeBase newProfileAttribute = (ProfileAttributeBase)MemberwiseClone(); + newProfileAttribute.Descriptor = Descriptor != null ? (AttributeDescriptor)Descriptor.Clone() : null; + newProfileAttribute.Value = Value is ICloneable ? 
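+            // Clone the value when it implements ICloneable; otherwise it is assigned as-is
+            // (value types are copied, reference types keep sharing the same instance):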
(T)((ICloneable)Value).Clone() : Value; + return newProfileAttribute; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Profile/ProfileQueryContext.cs b/src/Sdk/WebApi/WebApi/Contracts/Profile/ProfileQueryContext.cs new file mode 100644 index 00000000000..69daf560f65 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Profile/ProfileQueryContext.cs @@ -0,0 +1,51 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.Profile +{ + public class ProfileQueryContext + { + public ProfileQueryContext(AttributesScope scope, string containerName = null) + : this(scope, CoreProfileAttributes.All, containerName) + { + } + + public ProfileQueryContext(AttributesScope scope, CoreProfileAttributes coreAttributes, string containerName = null) + { + ContainerScope = scope; + CoreAttributes = coreAttributes; + switch (scope) + { + case AttributesScope.Core: + ContainerName = null; + break; + case AttributesScope.Core | AttributesScope.Application: + ProfileArgumentValidation.ValidateApplicationContainerName(containerName); + ContainerName = containerName; + break; + default: + throw new ArgumentException(string.Format("The scope '{0}' is not supported for this operation.", scope)); + } + } + + [DataMember(IsRequired = true)] + public AttributesScope ContainerScope { get; private set; } + + [DataMember] + public string ContainerName { get; private set; } + + [DataMember] + public CoreProfileAttributes CoreAttributes { get; private set; } + } + + [Flags] + public enum CoreProfileAttributes + { + Minimal = 0x0000, // Does not contain email, avatar, display name, or marketing preferences + Email = 0x0001, + Avatar = 0x0002, + DisplayName = 0x0004, + ContactWithOffers = 0x0008, + All = 0xFFFF, + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/PropertiesCollection.cs b/src/Sdk/WebApi/WebApi/Contracts/PropertiesCollection.cs new file mode 100644 index 00000000000..5b7748df4c3 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/PropertiesCollection.cs @@ -0,0 +1,462 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Reflection; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.Common.Internal; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.Services.WebApi +{ + //extended properties are serialized with many core types, + //so a single contract to deal with them + //the server side TeamFoundationPropertiesService stores five types in their native format + //: Byte[], Int32, Double, DateType and String. + //JSON.NET deals correctly with Double, DateType and String, but can't discern the proper + //type of Byte[] and Int32 on deserization if deserializing into Object. Byte[] gets serialized as a + //Base64 encoded string, and stays that way. All integers get serialized as Int64, and stay that way + //on deserialization. Adding ItemTypeNameHandling=TypeNameHandling.All fixed Byte[] but not Int32, it turns + //out that they only primitive type that gets the name is byte[]... + //So we implemented the PropertiesCollectionItemConverter to preserve the type. + //PropertyValidation accepts the 5 types named above, plus any other Primitive type (any type with a TypeCode != TypeCode.Object) + //Except for DBNull. We also accept Guid. Types *not* in the set of five (including Guid) are stored as String in the DB + //and come back as that from the service. There is a special TryGetValue that can be used to try to convert the type + //from string back to the type it is supposed to be. 
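+    // Illustrative sketch (hypothetical values, assuming a Newtonsoft.Json serializer that honors the
+    // PropertiesCollectionItemConverter declared on this type) of the round-trip behavior described above:
+    //
+    //     var props = new PropertiesCollection();
+    //     props["RetryCount"] = 3;                 // Int32 is one of the five natively stored types
+    //     props["CorrelationId"] = Guid.NewGuid(); // Guid is accepted, but stored and returned as a String
+    //
+    //     string json = JsonConvert.SerializeObject(props);
+    //     var copy = JsonConvert.DeserializeObject<PropertiesCollection>(json);
+    //
+    //     copy.TryGetValue("RetryCount", out int retries);   // still an Int32, thanks to the $type wrapper
+    //     copy.TryGetValue("CorrelationId", out Guid id);    // converted back from String on request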
+ + + /// + /// The class represents a property bag as a collection of key-value pairs. Values of all primitive types (any type with a `TypeCode != TypeCode.Object`) + /// except for `DBNull` are accepted. Values of type Byte[], Int32, Double, DateType and String preserve their type, + /// other primitives are retuned as a String. Byte[] expected as base64 encoded string. + /// + [CollectionDataContract(Name = "Properties", ItemName = "Property", KeyName = "Key", ValueName = "Value")] + [JsonDictionary(ItemConverterType = typeof(PropertiesCollectionItemConverter))] + public sealed class PropertiesCollection : IDictionary, ICollection + { + public PropertiesCollection() + { + m_innerDictionary = new Dictionary(VssStringComparer.PropertyName); + this.ValidateNewValues = true; + } + + public PropertiesCollection(IDictionary source) : this(source, validateExisting: true) + { + } + + internal PropertiesCollection(IDictionary source, bool validateExisting) + { + if (validateExisting) + { + PropertyValidation.ValidateDictionary(source); + } + m_innerDictionary = new Dictionary(source, VssStringComparer.PropertyName); + this.ValidateNewValues = true; + } + + private Dictionary m_innerDictionary; + + //allow containers to turn off property validation + internal Boolean ValidateNewValues + { + get; + set; + } + + #region Public Properties + /// + /// The count of properties in the collection. + /// + public Int32 Count + { + get + { + return m_innerDictionary.Count; + } + } + + /// + /// Implements IDictionary<String, Object>.Item + /// + /// + /// + public Object this[String key] + { + get + { + return m_innerDictionary[key]; + } + set + { + if (this.ValidateNewValues) + { + PropertyValidation.ValidatePropertyName(key); + PropertyValidation.ValidatePropertyValue(key, value); + } + + m_innerDictionary[key] = value; + } + } + + /// + /// The set of keys in the collection. + /// + public Dictionary.KeyCollection Keys + { + get + { + return m_innerDictionary.Keys; + } + } + + /// + /// The set of values in the collection. 
+ /// + public Dictionary.ValueCollection Values + { + get + { + return m_innerDictionary.Values; + } + } + #endregion + + #region Public Methods + /// + /// Implements IDictionary<String, Object>.Add + /// + /// + /// + public void Add(String key, Object value) + { + if (this.ValidateNewValues) + { + PropertyValidation.ValidatePropertyName(key); + PropertyValidation.ValidatePropertyValue(key, value); + } + + m_innerDictionary.Add(key, value); + } + + /// + /// Implements ICollection<KeyValuePair<String, Object>>.Clear() + /// + public void Clear() + { + m_innerDictionary.Clear(); + } + + /// + /// Implements IDictionary<String, Object>.ContainsKey() + /// + /// + /// + public Boolean ContainsKey(String key) + { + return m_innerDictionary.ContainsKey(key); + } + + /// + /// Implements IDictionary<String, Object>.ContainsValue() + /// + /// + /// + public Boolean ContainsValue(Object value) + { + return m_innerDictionary.ContainsValue(value); + } + + /// + /// Implements IDictionary<String, Object>.Remove() + /// + /// + /// + public Boolean Remove(String key) + { + return m_innerDictionary.Remove(key); + } + + public T GetValue(String key, T defaultValue) + { + T value; + if (!TryGetValue(key, out value)) + { + value = defaultValue; + } + return value; + } + + /// + /// Implements IDictionary<String, Object>.TryGetValue() + /// + /// + /// + /// + public Boolean TryGetValue(String key, out Object value) + { + return m_innerDictionary.TryGetValue(key, out value); + } + + public Boolean TryGetValue(String key, out T value) + { + return this.TryGetValidatedValue(key, out value); + } + + public override Boolean Equals(Object otherObj) + { + if (Object.ReferenceEquals(this, otherObj)) + { + return true; + } + + PropertiesCollection otherCollection = otherObj as PropertiesCollection; + if (otherCollection == null || Count != otherCollection.Count) + { + return false; + } + else + { + Object obj; + foreach (var key in Keys) + { + if (!otherCollection.TryGetValue(key, out obj) || !obj.Equals(this[key])) + { + return false; + } + } + return true; + } + } + + public override Int32 GetHashCode() + { + return base.GetHashCode(); + } + + #endregion + + #region ICollection explicit implementation + //We implement ICollection to get the SyncRoot + void ICollection.CopyTo(Array array, int index) + { + ((ICollection)m_innerDictionary).CopyTo(array, index); + } + + Boolean ICollection.IsSynchronized + { + get + { + return ((ICollection)m_innerDictionary).IsSynchronized; + } + } + + Object ICollection.SyncRoot + { + get + { + return ((ICollection)m_innerDictionary).SyncRoot; + } + } + #endregion + + #region ICollection explicit implementation + void ICollection>.Add(KeyValuePair keyValuePair) + { + if (this.ValidateNewValues) + { + PropertyValidation.ValidatePropertyName(keyValuePair.Key); + PropertyValidation.ValidatePropertyValue(keyValuePair.Key, keyValuePair.Value); + } + + ((ICollection>)m_innerDictionary).Add(keyValuePair); + } + + Boolean ICollection>.Contains(KeyValuePair keyValuePair) + { + return ((ICollection>)m_innerDictionary).Contains(keyValuePair); + } + + void ICollection>.CopyTo(KeyValuePair[] array, Int32 index) + { + ((ICollection>)m_innerDictionary).CopyTo(array, index); + } + + Boolean ICollection>.IsReadOnly + { + get + { + return false; + } + } + + Boolean ICollection>.Remove(KeyValuePair keyValuePair) + { + return ((ICollection>)m_innerDictionary).Remove(keyValuePair); + } + #endregion + + #region IDictionary explicit implementation + ICollection IDictionary.Keys + { + get + { + 
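+                // The explicit interface members simply delegate to the inner dictionary: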
return ((IDictionary)m_innerDictionary).Keys; + } + } + + ICollection IDictionary.Values + { + get + { + return ((IDictionary)m_innerDictionary).Values; + } + } + #endregion + + #region IEnumerable> explicit implementation + IEnumerator> IEnumerable>.GetEnumerator() + { + return ((IEnumerable>)m_innerDictionary).GetEnumerator(); + } + #endregion + + #region IEnumerable implementation + IEnumerator IEnumerable.GetEnumerator() + { + return ((IEnumerable)m_innerDictionary).GetEnumerator(); + } + #endregion + + #region PropertiesCollectionItemConverter class + internal class PropertiesCollectionItemConverter : JsonConverter + { + public PropertiesCollectionItemConverter() { } + + private const string TypePropertyName = "$type"; + private const string ValuePropertyName = "$value"; + + /// + /// Writes the JSON representation of the object. + /// + /// The to write to. + /// The value. + /// The calling serializer. + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + Type valueType = value.GetType(); + + // We don't want to use the type name of the enum itself; instead we marshal + // it as a decimal number inside of a string + if (valueType.GetTypeInfo().IsEnum) + { + value = ((Enum)value).ToString("D"); + valueType = typeof(String); + } + + PropertyValidation.ValidatePropertyValue(WebApiResources.SerializingPhrase(), value); + + //write out as an object with type information + writer.WriteStartObject(); + writer.WritePropertyName(TypePropertyName); + + // Check that the Type we're claiming is safely deserializable + String typeName = valueType.FullName; + + if (!PropertyValidation.IsValidTypeString(typeName)) + { + throw new PropertyTypeNotSupportedException(TypePropertyName, valueType); + } + + writer.WriteValue(typeName); + writer.WritePropertyName(ValuePropertyName); + writer.WriteValue(value); + writer.WriteEndObject(); + } + + /// + /// Reads the JSON representation of the object. + /// + /// The to read from. + /// Type of the object. + /// The existing value of object being read. + /// The calling serializer. + /// The object value. + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + if (reader.TokenType == JsonToken.StartObject) + { + JObject valueInfo = serializer.Deserialize(reader); + if (!valueInfo.TryGetValue(TypePropertyName, out JToken typeToken) || + !valueInfo.TryGetValue(ValuePropertyName, out JToken valueToken)) + { + // The following block is for compatability with old code behavior. + // The old code blindly took the first argument add treated it as the $type string, + // It blindly took the second argument and treated it as the $value object. + IEnumerator tokenEnumerator = valueInfo.Values().GetEnumerator(); + if (tokenEnumerator.MoveNext()) + { + typeToken = tokenEnumerator.Current; + if (tokenEnumerator.MoveNext()) + { + valueToken = tokenEnumerator.Current; + } + else + { + throw new InvalidOperationException(WebApiResources.DeserializationCorrupt()); + } + } + else + { + throw new InvalidOperationException(WebApiResources.DeserializationCorrupt()); + } + } + + string typeToCreate = typeToken.ToObject(); + + //make sure the string is a valid type, + //an arbitrary type string with nested generics could overflow the + //stack for a DOS. 
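+                    // (for example, a crafted "$type" of deeply nested generics such as
+                    // "List`1[[List`1[[...]]]]" could exhaust the stack while being resolved)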
+ if (!PropertyValidation.TryGetValidType(typeToCreate, out Type type)) + { + throw new InvalidOperationException(WebApiResources.DeserializationCorrupt()); + } + + //deserialize the type + return valueToken.ToObject(type); + } + else if (reader.TokenType == JsonToken.Boolean || + reader.TokenType == JsonToken.Bytes || + reader.TokenType == JsonToken.Date || + reader.TokenType == JsonToken.Float || + reader.TokenType == JsonToken.Integer || + reader.TokenType == JsonToken.String) + { + // Allow the JSON to simply specify "name": value syntax if type information is not necessary. + return serializer.Deserialize(reader); + } + else if (reader.TokenType == JsonToken.Null) + { + return null; + } + else + { + throw new InvalidOperationException(WebApiResources.DeserializationCorrupt()); + } + } + + /// + /// Determines whether this instance can convert the specified object type. + /// + /// Type of the object. + /// + /// true if this instance can convert the specified object type; otherwise, false. + /// + public override Boolean CanConvert(Type objectType) + { + return true; + } + } + #endregion + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/ReferenceLink/ReferenceLink.cs b/src/Sdk/WebApi/WebApi/Contracts/ReferenceLink/ReferenceLink.cs new file mode 100644 index 00000000000..583efb56c75 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/ReferenceLink/ReferenceLink.cs @@ -0,0 +1,54 @@ +using GitHub.Services.Common; +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.WebApi +{ + /// + /// The class to represent a REST reference link. + /// + /// RFC: http://tools.ietf.org/html/draft-kelly-json-hal-06 + /// + /// The RFC is not fully implemented, additional properties are allowed on the + /// reference link but as of yet we don't have a need for them. + /// + [DataContract] + public class ReferenceLink : ISecuredObject + { + public ReferenceLink() { } + + internal ReferenceLink(ISecuredObject securedObject) + { + m_securedObject = securedObject; + } + + [DataMember] + public string Href { get; set; } + + Guid ISecuredObject.NamespaceId + { + get + { + ArgumentUtility.CheckForNull(m_securedObject, nameof(m_securedObject)); + return m_securedObject.NamespaceId; + } + } + + int ISecuredObject.RequiredPermissions + { + get + { + ArgumentUtility.CheckForNull(m_securedObject, nameof(m_securedObject)); + return m_securedObject.RequiredPermissions; + } + } + + string ISecuredObject.GetToken() + { + ArgumentUtility.CheckForNull(m_securedObject, nameof(m_securedObject)); + return m_securedObject.GetToken(); + } + + private readonly ISecuredObject m_securedObject; + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/ReferenceLink/ReferenceLinks.cs b/src/Sdk/WebApi/WebApi/Contracts/ReferenceLink/ReferenceLinks.cs new file mode 100644 index 00000000000..35d5d9be535 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/ReferenceLink/ReferenceLinks.cs @@ -0,0 +1,304 @@ +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.ComponentModel; +using System.Xml; +using System.Xml.Schema; +using System.Xml.Serialization; +using GitHub.Services.Common; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.Services.WebApi +{ + /// + /// The class to represent a collection of REST reference links. + /// + [XmlRoot("ReferenceLinks")] + [JsonConverter(typeof(ReferenceLinksConverter))] + public class ReferenceLinks : ICloneable, IXmlSerializable + { + /// + /// The internal representation of the reference links. 
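+        /// The value stored for a name is either a single ReferenceLink or, once AddLink is called again
+        /// for the same name, an IList of ReferenceLink (see AddLink below).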
+ /// + private IDictionary referenceLinks = new Dictionary(); + + /// + /// Helper method to easily add a reference link to the dictionary. + /// If the specified name has already been added, the subsequent calls + /// to AddLink will create a list of reference links for the name. + /// + /// The name of the reference link. + /// The href the reference link refers to. + /// The implementation for securedObject. + [EditorBrowsable(EditorBrowsableState.Never)] + public void AddLink(string name, string href, ISecuredObject securedObject) + { + if (referenceLinks.ContainsKey(name)) + { + IList links; + if (referenceLinks[name] is ReferenceLink) + { + // promote to a list of links + links = new List(); + links.Add((ReferenceLink)referenceLinks[name]); + referenceLinks[name] = links; + } + else + { + links = (IList)referenceLinks[name]; + } + + links.Add(new ReferenceLink(securedObject) { Href = href }); + } + else + { + referenceLinks[name] = new ReferenceLink(securedObject) { Href = href }; + } + } + + /// + /// Helper method to easily add a reference link to the dictionary. + /// If the specified name has already been added, the subsequent calls + /// to AddLink will create a list of reference links for the name. + /// + /// The name of the reference link. + /// The href the reference link refers to. + public void AddLink(string name, string href) + { + AddLink(name, href, null); + } + + /// + /// Helper method to easily add a reference link to the dictionary if href is not null or empty value. + /// If the specified name has already been added, the subsequent calls to AddLink will create a list of reference links for the name. + /// + /// The name of the reference link. + /// The href the reference link refers to. + public void AddLinkIfIsNotEmpty(string name, string href) + { + if (!string.IsNullOrEmpty(href)) + { + AddLink(name, href, null); + } + } + + Object ICloneable.Clone() + { + return this.Clone(); + } + + /// + /// Creates a deep copy of the ReferenceLinks. + /// + /// A deep copy of the ReferenceLinks + public ReferenceLinks Clone() + { + ReferenceLinks linksCloned = new ReferenceLinks(); + this.CopyTo(linksCloned); + return linksCloned; + } + + /// + /// Copies the ReferenceLinks to another ReferenceLinks. + /// + /// + public void CopyTo(ReferenceLinks target) + { + CopyTo(target, null); + } + + /// + /// Copies the ReferenceLinks to another ReferenceLinks and secures using the specified object. 
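+        /// Each link is re-added on the target via AddLink, so lists of links are rebuilt the same way
+        /// AddLink originally built them.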
+ /// + /// + public void CopyTo(ReferenceLinks target, ISecuredObject securedObject) + { + ArgumentUtility.CheckForNull(target, nameof(target)); + + foreach (var link in this.Links) + { + if (link.Value is IList) + { + var hrefs = link.Value as IList; + if (hrefs != null) + { + foreach (var href in hrefs) + { + target.AddLink(link.Key, href.Href, securedObject); + } + } + } + else if (link.Value is ReferenceLink) + { + var href = link.Value as ReferenceLink; + if (href != null) + { + target.AddLink(link.Key, href.Href, securedObject); + } + } + } + } + + XmlSchema IXmlSerializable.GetSchema() + { + return null; + } + + void IXmlSerializable.ReadXml(XmlReader reader) + { + XmlSerializer keySerializer = new XmlSerializer(typeof(string)); + XmlSerializer valueSerializer = new XmlSerializer(typeof(List)); + + bool wasEmpty = reader.IsEmptyElement; + reader.Read(); + + if (wasEmpty) + { + return; + } + + while (reader.NodeType != XmlNodeType.EndElement) + { + reader.ReadStartElement("item"); + + reader.ReadStartElement("key"); + var key = (string)keySerializer.Deserialize(reader); + reader.ReadEndElement(); + + reader.ReadStartElement("value"); + var value = (List)valueSerializer.Deserialize(reader); + reader.ReadEndElement(); + + if (value.Count == 1) + { + referenceLinks.Add(key, value[0]); + } + else if (value.Count > 1) + { + referenceLinks.Add(key, value); + } + + reader.ReadEndElement(); + reader.MoveToContent(); + } + reader.ReadEndElement(); + } + + void IXmlSerializable.WriteXml(XmlWriter writer) + { + XmlSerializer keySerializer = new XmlSerializer(typeof(string)); + XmlSerializer valueSerializer = new XmlSerializer(typeof(List)); + + foreach (var item in this.referenceLinks) + { + writer.WriteStartElement("item"); + + writer.WriteStartElement("key"); + keySerializer.Serialize(writer, item.Key); + writer.WriteEndElement(); + + writer.WriteStartElement("value"); + var links = item.Value as List; + if (links == null) + { + links = new List() + { + (ReferenceLink)item.Value + }; + } + + valueSerializer.Serialize(writer, links); + writer.WriteEndElement(); + + writer.WriteEndElement(); + } + } + + /// + /// The readonly view of the links. Because Reference links are readonly, + /// we only want to expose them as read only. + /// + public IReadOnlyDictionary Links + { + get + { + return new ReadOnlyDictionary(referenceLinks); + } + } + + /// + /// The json converter to represent the reference links as a dictionary. + /// + private class ReferenceLinksConverter : VssSecureJsonConverter + { + public override bool CanConvert(Type objectType) + { + return (objectType == typeof(ReferenceLinks)); + } + + /// + /// Because ReferenceLinks is a dictionary of either a single + /// ReferenceLink or an array of ReferenceLinks, we need custom + /// deserialization to correctly rebuild the dictionary. 
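+            /// For example, both {"self":{"href":"..."}} and {"avatars":[{"href":"..."},{"href":"..."}]}
+            /// must round-trip into the same single-link / link-list shapes that AddLink produces.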
+ /// + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + var unresolvedLinks = serializer.Deserialize>(reader); + if (unresolvedLinks == null) + { + return null; + } + + var links = new Dictionary(); + foreach (var entry in unresolvedLinks) + { + if (String.IsNullOrEmpty(entry.Key)) + { + throw new JsonSerializationException(WebApiResources.InvalidReferenceLinkFormat()); + } + + JToken token = entry.Value as JToken; + if (token != null) + { + switch (token.Type) + { + case JTokenType.Array: + using (var tokenReader = token.CreateReader()) + { + links[entry.Key] = serializer.Deserialize>(tokenReader); + } + break; + + case JTokenType.Object: + using (var tokenReader = token.CreateReader()) + { + links[entry.Key] = serializer.Deserialize(tokenReader); + } + break; + + default: + throw new JsonSerializationException(WebApiResources.InvalidReferenceLinkFormat()); + } + } + else if (entry.Value is ReferenceLink || entry.Value is IList) + { + links[entry.Key] = entry.Value; + } + else + { + throw new JsonSerializationException(WebApiResources.InvalidReferenceLinkFormat()); + } + } + + return new ReferenceLinks { referenceLinks = links }; + } + + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + base.WriteJson(writer, value, serializer); + serializer.Serialize(writer, ((ReferenceLinks)value).referenceLinks); + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Users/UpdateUserParameters.cs b/src/Sdk/WebApi/WebApi/Contracts/Users/UpdateUserParameters.cs new file mode 100644 index 00000000000..e74dfdd8a65 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Users/UpdateUserParameters.cs @@ -0,0 +1,181 @@ +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using System; +using System.Runtime.Serialization; +using System.Linq; +using System.Globalization; + +namespace GitHub.Services.Users +{ + /// + /// Used for updating a user's data. + /// + [DataContract] + public class UpdateUserParameters + { + /// + /// Creates a new instance of an UpdateUserParameters object. + /// + public UpdateUserParameters() + { + this.Properties = new PropertiesCollection(); + } + + public UpdateUserParameters(UpdateUserParameters copy) + { + Descriptor = copy.Descriptor; + Properties = new PropertiesCollection(copy.Properties); + LastModified = copy.LastModified; + Revision = copy.Revision; + } + + /// + /// The user's unique identifier, and the primary means by which the user is referenced. + /// + [IgnoreDataMember] + public SubjectDescriptor Descriptor { get; set; } + + /// + /// The collection of properties to set. See "User" for valid fields. + /// + [DataMember(IsRequired = true, EmitDefaultValue = false)] + public PropertiesCollection Properties + { + get; private set; + } + + /// + /// The user's name, as displayed throughout the product. + /// + [IgnoreDataMember] + public String DisplayName + { + set { this.Properties[nameof(DisplayName)] = value; } + get { return this.Properties.GetValue(nameof(DisplayName), defaultValue: null); } + } + + /// + /// The user's preferred email address. + /// + [IgnoreDataMember] + public String Mail + { + set { this.Properties[nameof(Mail)] = value; } + get { return this.Properties.GetValue(nameof(Mail), defaultValue: null); } + } + + /// + /// The user's preferred email address which has not yet been confirmed. 
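+        /// Like the other scalar members of this type, the value is stored in the Properties collection
+        /// under the key nameof(UnconfirmedMail), so only explicitly assigned fields are transmitted.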
+ /// + [IgnoreDataMember] + public String UnconfirmedMail + { + set { this.Properties[nameof(UnconfirmedMail)] = value; } + get { return this.Properties.GetValue(nameof(UnconfirmedMail), defaultValue: null); } + } + + /// + /// The user's country of residence or association. + /// + [IgnoreDataMember] + public String Country + { + set { this.Properties[nameof(Country)] = value; } + get { return this.Properties.GetValue(nameof(Country), defaultValue: null); } + } + + /// + /// The region in which the user resides or is associated. + /// + [IgnoreDataMember] + public String Region + { + set { this.Properties[nameof(Region)] = value; } + get { return this.Properties.GetValue(nameof(Region), defaultValue: null); } + } + + /// + /// A short blurb of "about me"-style text. + /// + [IgnoreDataMember] + public String Bio + { + set { this.Properties[nameof(Bio)] = value; } + get { return this.Properties.GetValue(nameof(Bio), defaultValue: null); } + } + + /// + /// A link to an external blog. + /// + [IgnoreDataMember] + public String Blog + { + set { this.Properties[nameof(Blog)] = value; } + get { return this.Properties.GetValue(nameof(Blog), defaultValue: null); } + } + + /// + /// The company at which the user is employed. + /// + [IgnoreDataMember] + public String Company + { + set { this.Properties[nameof(Company)] = value; } + get { return this.Properties.GetValue(nameof(Company), defaultValue: null); } + } + + /// + /// The date/time at which the user data was last modified. + /// + [IgnoreDataMember] + internal DateTimeOffset LastModified { get; set; } + + /// + /// The user data revision, for change tracking. + /// + [IgnoreDataMember] + internal Int32 Revision { get; set; } + + internal UpdateUserParameters Clone() + { + UpdateUserParameters clone = new UpdateUserParameters(); + + clone.Descriptor = this.Descriptor; + clone.Properties = new PropertiesCollection(this.Properties); + clone.Revision = this.Revision; + + return clone; + } + + internal virtual User ToUser() + { + User user = new User + { + Descriptor = this.Descriptor, + LastModified = this.LastModified, + Revision = this.Revision, + }; + + user.UpdateWith(this); + + return user; + } + + public override string ToString() + { + return String.Format( + CultureInfo.InvariantCulture, + @"UpdateUserParameters +[ +Descriptor: {0} +Revision: {1} +LastModified: {2} +{3} +]", + this.Descriptor, + this.Revision, + this.LastModified, + String.Join("\r\n", Properties.Select(kvp => $"{kvp.Key}:{kvp.Value}"))); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Contracts/Users/User.cs b/src/Sdk/WebApi/WebApi/Contracts/Users/User.cs new file mode 100644 index 00000000000..0c529d45672 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Contracts/Users/User.cs @@ -0,0 +1,187 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Services.Users +{ + [DataContract] + public class User + { + public User() + { + } + + public User(User copy) + { + Descriptor = copy.Descriptor; + UserName = copy.UserName; + DisplayName = copy.DisplayName; + Mail = copy.Mail; + UnconfirmedMail = copy.UnconfirmedMail; + Bio = copy.Bio; + Blog = copy.Blog; + Company = copy.Company; + Country = copy.Country; + DateCreated = copy.DateCreated; + Links = copy.Links; + LastModified = copy.LastModified; + Revision = copy.Revision; + State = copy.State; + } + + /// + /// The user's unique identifier, and the primary means by which the user is referenced. 
+ /// + [DataMember(IsRequired = true)] + public SubjectDescriptor Descriptor { get; set; } + + /// + /// The unique name of the user. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String UserName { get; set; } + + /// + /// The user's name, as displayed throughout the product. + /// + [DataMember(IsRequired = false)] + public String DisplayName { get; set; } + + /// + /// The user's preferred email address. + /// + [DataMember(IsRequired = false)] + public String Mail { get; set; } + + /// + /// The user's preferred email address which has not yet been confirmed. + /// Do not use this as an email destination, instead prefer the + /// confirmed email address + /// + [DataMember(IsRequired = false)] + public String UnconfirmedMail { get; set; } + + /// + /// A short blurb of "about me"-style text. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Bio { get; set; } + + /// + /// A link to an external blog. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Blog { get; set; } + + /// + /// The company at which the user is employed. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public String Company { get; set; } + + /// + /// The user's country of residence or association. + /// + [DataMember(IsRequired = false)] + public String Country { get; set; } + + /// + /// The date the user was created in the system + /// + [DataMember(IsRequired = false)] + public DateTimeOffset DateCreated { get; set; } + + /// + /// A set of readonly links for obtaining more info about the user. + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public ReferenceLinks Links { get; internal set; } + + /// + /// The date/time at which the user data was last modified. + /// + [DataMember(IsRequired = false)] + public DateTimeOffset LastModified { get; internal set; } + + /// + /// The attribute's revision, for change tracking. 
+ /// + [DataMember(IsRequired = false)] + public Int32 Revision { get; internal set; } + + /// + /// The status of the user + /// + [DataMember(IsRequired = false, EmitDefaultValue = false)] + public UserState State { get; internal set; } + + /// + /// Enumeration for the user status + /// + [DataContract] + public enum UserState + { + Wellformed=0, + PendingProfileCreation, + Deleted, + } + + public static implicit operator UpdateUserParameters(User user) + { + return new UpdateUserParameters + { + Descriptor = user.Descriptor, + DisplayName = user.DisplayName, + Mail = user.Mail, + UnconfirmedMail = user.UnconfirmedMail, + Country = user.Country, + Bio = user.Bio, + Blog = user.Blog, + Company = user.Company, + LastModified = user.LastModified, + Revision = user.Revision, + }; + } + + internal virtual void UpdateWith(UpdateUserParameters userParameters) + { + ArgumentUtility.CheckForNull(userParameters, nameof(userParameters)); + + foreach (String propertyName in userParameters.Properties.Keys) + { + String value = userParameters.Properties[propertyName] as String; + switch (propertyName) + { + case (nameof(DisplayName)): + DisplayName = value; + break; + + case (nameof(Mail)): + Mail = value; + break; + + case (nameof(UnconfirmedMail)): + UnconfirmedMail = value; + break; + + case (nameof(Country)): + Country = value; + break; + + case (nameof(Bio)): + Bio = value; + break; + + case (nameof(Blog)): + Blog = value; + break; + + case (nameof(Company)): + Company = value; + break; + } + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Exceptions/CommonRestExceptions.cs b/src/Sdk/WebApi/WebApi/Exceptions/CommonRestExceptions.cs new file mode 100644 index 00000000000..351e9be7fd6 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Exceptions/CommonRestExceptions.cs @@ -0,0 +1,76 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.Common; + +namespace GitHub.Services.WebApi.Exceptions +{ + [Serializable] + [ExceptionMapping("0.0", "3.0", "MissingRequiredParameterException", "GitHub.Services.WebApi.Exceptions.MissingRequiredParameterException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class MissingRequiredParameterException : VssServiceException + { + public MissingRequiredParameterException() + { + } + + public MissingRequiredParameterException(string message) + : base(message) + { + } + + public MissingRequiredParameterException(string message, Exception innerException) + : base(message, innerException) + { + } + + public MissingRequiredParameterException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class MissingRequiredHeaderException : VssServiceException + { + public MissingRequiredHeaderException() + { + } + + public MissingRequiredHeaderException(string message) + : base(message) + { + } + + public MissingRequiredHeaderException(string message, Exception innerException) + : base(message, innerException) + { + } + + public MissingRequiredHeaderException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class MultipleHeaderValuesException : VssServiceException + { + public MultipleHeaderValuesException() + { + } + + public MultipleHeaderValuesException(string message) + : base(message) + { + } + + public MultipleHeaderValuesException(string message, Exception innerException) + : base(message, innerException) + { + } + + public MultipleHeaderValuesException(SerializationInfo info, 
StreamingContext context) + : base(info, context) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Exceptions/FileContainerExceptions.cs b/src/Sdk/WebApi/WebApi/Exceptions/FileContainerExceptions.cs new file mode 100644 index 00000000000..9381ced29ba --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Exceptions/FileContainerExceptions.cs @@ -0,0 +1,360 @@ +using System; +using System.Diagnostics.CodeAnalysis; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Services.FileContainer +{ + [Serializable] + [SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")] + [ExceptionMapping("0.0", "3.0", "FileContainerException", "GitHub.Services.FileContainer.FileContainerException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public abstract class FileContainerException : VssServiceException + { + public FileContainerException() + { + EventId = VssEventId.FileContainerBaseEventId; + } + + public FileContainerException(String message) + : base(message) + { + EventId = VssEventId.FileContainerBaseEventId; + } + + public FileContainerException(String message, Exception ex) + : base(message, ex) + { + EventId = VssEventId.FileContainerBaseEventId; + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ArtifactUriNotSupportedException", "GitHub.Services.FileContainer.ArtifactUriNotSupportedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ArtifactUriNotSupportedException : FileContainerException + { + public ArtifactUriNotSupportedException(Uri artifactUri) : + base(FileContainerResources.ArtifactUriNotSupportedException(artifactUri)) + { + } + + public ArtifactUriNotSupportedException(String message) : + base(message) + { + } + + public ArtifactUriNotSupportedException(String message, Exception ex) : + base(message, ex) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerNotFoundException", "GitHub.Services.FileContainer.ContainerNotFoundException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ContainerNotFoundException : FileContainerException + { + public ContainerNotFoundException() : + base() + { + } + + public ContainerNotFoundException(Int64 containerId) : + base(FileContainerResources.ContainerNotFoundException(containerId)) + { + } + + public ContainerNotFoundException(String message) : + base(message) + { + } + + public ContainerNotFoundException(String message, Exception ex) : + base(message, ex) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerItemNotFoundException", "GitHub.Services.FileContainer.ContainerItemNotFoundException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerItemNotFoundException : FileContainerException + { + public ContainerItemNotFoundException() : + base() + { + } + + public ContainerItemNotFoundException(Int64 containerId, String path) : + base(FileContainerResources.ContainerItemNotFoundException(path, containerId)) + { + } + + public ContainerItemNotFoundException(ContainerItemType itemType, String existingPath) + : 
base(FileContainerResources.ContainerItemDoesNotExist(existingPath, itemType)) + { + } + + public ContainerItemNotFoundException(String message) : + base(message) + { + } + + public ContainerItemNotFoundException(String message, Exception ex) : + base(message, ex) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerWriteAccessDeniedException", "GitHub.Services.FileContainer.ContainerWriteAccessDeniedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ContainerWriteAccessDeniedException : FileContainerException + { + public ContainerWriteAccessDeniedException(String message) : + base(message) + { + } + + public ContainerWriteAccessDeniedException(String message, Exception ex) : + base(message, ex) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerItemExistsException", "GitHub.Services.FileContainer.ContainerItemExistsException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerItemExistsException : FileContainerException + { + public ContainerItemExistsException(ContainerItemType itemType, String existingPath) + : base(FileContainerResources.ContainerItemWithDifferentTypeExists(itemType, existingPath)) + { + } + + public ContainerItemExistsException(String message) : + base(message) + { + } + + public ContainerItemExistsException(String message, Exception ex) : + base(message, ex) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerItemCopyTargetChildOfSourceException", "GitHub.Services.FileContainer.ContainerItemCopyTargetChildOfSourceException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerItemCopyTargetChildOfSourceException : FileContainerException + { + public ContainerItemCopyTargetChildOfSourceException(String targetPath, String sourcePath) + : base(FileContainerResources.ContainerItemCopyTargetChildOfSource(targetPath, sourcePath)) + { + } + + public ContainerItemCopyTargetChildOfSourceException(String message) : + base(message) + { + } + + public ContainerItemCopyTargetChildOfSourceException(String message, Exception ex) : + base(message, ex) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerItemCopySourcePendingUploadException", "GitHub.Services.FileContainer.ContainerItemCopySourcePendingUploadException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerItemCopySourcePendingUploadException : FileContainerException + { + public ContainerItemCopySourcePendingUploadException(String sourcePath) + : base(FileContainerResources.ContainerItemCopySourcePendingUpload(sourcePath)) + { + } + + public ContainerItemCopySourcePendingUploadException(String message, Exception ex) : + base(message, ex) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerItemCopyDuplicateTargetsException", 
"GitHub.Services.FileContainer.ContainerItemCopyDuplicateTargetsException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerItemCopyDuplicateTargetsException : FileContainerException + { + public ContainerItemCopyDuplicateTargetsException(String targetPath) + : base(FileContainerResources.ContainerItemCopyDuplicateTargets(targetPath)) + { + } + + public ContainerItemCopyDuplicateTargetsException(String message, Exception ex) : + base(message, ex) + { + } +} + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "PendingUploadNotFoundException", "GitHub.Services.FileContainer.PendingUploadNotFoundException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class PendingUploadNotFoundException : FileContainerException + { + public PendingUploadNotFoundException(Int32 uploadId) : + base(FileContainerResources.PendingUploadNotFoundException(uploadId)) + { + } + + public PendingUploadNotFoundException(String message) : + base(message) + { + } + + public PendingUploadNotFoundException(String message, Exception ex) : + base(message, ex) + { + } +} + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerAlreadyExistsException", "GitHub.Services.FileContainer.ContainerAlreadyExistsException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerAlreadyExistsException : FileContainerException + { + public ContainerAlreadyExistsException(String artifactUri) + : base(FileContainerResources.ContainerAlreadyExists(artifactUri)) + { + } + + public ContainerAlreadyExistsException(String message, Exception ex) : + base(message, ex) + { + } +} + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerUnexpectedContentTypeException", "GitHub.Services.FileContainer.ContainerUnexpectedContentTypeException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerUnexpectedContentTypeException : FileContainerException + { + public ContainerUnexpectedContentTypeException(String expectedContent, String actualContent) + : base(FileContainerResources.UnexpectedContentType(expectedContent, actualContent)) + { + } + + public ContainerUnexpectedContentTypeException(String message) : + base(message) + { + } + + public ContainerUnexpectedContentTypeException(String message, Exception ex) : + base(message, ex) + { + } +} + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerNoContentException", "GitHub.Services.FileContainer.ContainerNoContentException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerNoContentException : FileContainerException + { + public ContainerNoContentException() + : base(FileContainerResources.NoContentReturned()) + { + } + + public ContainerNoContentException(String message) : + base(message) + { + } + + public ContainerNoContentException(String message, Exception ex) : + base(message, ex) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", 
"CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerItemContentException", "GitHub.Services.FileContainer.ContainerItemContentException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerItemContentException : FileContainerException + { + public ContainerItemContentException() + : base(FileContainerResources.NoContentReturned()) + { + } + + public ContainerItemContentException(String message) : + base(message) + { + } + + public ContainerItemContentException(String message, Exception ex) : + base(message, ex) + { + } + } + + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerContentIdCollisionException", "GitHub.Services.FileContainer.ContainerContentIdCollisionException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerContentIdCollisionException : FileContainerException + { + public ContainerContentIdCollisionException(String fileId1, String length1, String fileId2, String length2) + : base(FileContainerResources.ContentIdCollision(fileId1, length1, fileId2, length2)) + { + } + + public ContainerContentIdCollisionException(String message) : + base(message) + { + } + + public ContainerContentIdCollisionException(String message, Exception ex) : + base(message, ex) + { + } + } + + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerItemCreateDuplicateItemException", "GitHub.Services.FileContainer.ContainerItemCreateDuplicateItemException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerItemCreateDuplicateItemException : FileContainerException + { + public ContainerItemCreateDuplicateItemException(String targetPath) + : base(FileContainerResources.ContainerItemCopyDuplicateTargets(targetPath)) + { + } + public ContainerItemCreateDuplicateItemException(String message, Exception ex) : + base(message, ex) + { + } + } + + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerDeleteFailedException", "GitHub.Services.FileContainer.ContainerDeleteFailedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerDeleteFailedException : FileContainerException + { + public ContainerDeleteFailedException(String targetContainerPath) + : base(targetContainerPath) + { + } + } + + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ContainerItemUpdateFailedException", "GitHub.Services.FileContainer.ContainerItemUpdateFailedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public sealed class ContainerItemUpdateFailedException : FileContainerException + { + public ContainerItemUpdateFailedException(String targetPath) + : base(targetPath) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Exceptions/GraphExceptions.cs b/src/Sdk/WebApi/WebApi/Exceptions/GraphExceptions.cs new file mode 100644 index 00000000000..a606584aa24 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Exceptions/GraphExceptions.cs @@ -0,0 +1,469 @@ +using System; +using 
System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.Identity; +using GitHub.Services.WebApi; + +namespace GitHub.Services.Graph +{ + [Serializable] + public class GraphException : VssServiceException + { + public GraphException() + { } + + public GraphException(string message) + : base(message) + { } + + public GraphException(string message, Exception innerException) + : base(message, innerException) + { } + + protected GraphException(SerializationInfo info, StreamingContext context) + : base(info, context) + { } + } + + #region Common Exceptions + + [Serializable] + public class GraphBadRequestException : GraphException + { + protected GraphBadRequestException() + { + } + + public GraphBadRequestException(string message) + : base(message) + { + } + + public GraphBadRequestException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected GraphBadRequestException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class InvalidGraphMemberIdException : GraphException + { + protected InvalidGraphMemberIdException() + { + } + + public InvalidGraphMemberIdException(string message) + : base(message) + { + } + + public InvalidGraphMemberIdException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected InvalidGraphMemberIdException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class GraphSubjectNotFoundException : GraphException + { + protected GraphSubjectNotFoundException() + { + } + + public GraphSubjectNotFoundException(string message) + : base(message) + { + } + + public GraphSubjectNotFoundException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected GraphSubjectNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + public GraphSubjectNotFoundException(SubjectDescriptor subjectDescriptor) + : base(GraphResources.GraphSubjectNotFound(subjectDescriptor.ToString())) + { + } + + public GraphSubjectNotFoundException(Guid id) + : base(IdentityResources.IdentityNotFoundWithTfid(id)) + { + } + } + + [Serializable] + public class GraphMemberNotFoundException : GraphException + { + protected GraphMemberNotFoundException() + { + } + + public GraphMemberNotFoundException(string message) + : base(message) + { + } + + public GraphMemberNotFoundException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected GraphMemberNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + public GraphMemberNotFoundException(SubjectDescriptor subjectDescriptor, SubjectDescriptor containerDescriptor) + : base(GraphResources.GraphMembershipNotFound(subjectDescriptor.ToString(), containerDescriptor.ToString())) + { + } + } + + [Serializable] + public class GraphMembershipNotFoundException : GraphException + { + protected GraphMembershipNotFoundException() + { + } + + public GraphMembershipNotFoundException(string message) + : base(message) + { + } + + public GraphMembershipNotFoundException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected GraphMembershipNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + public GraphMembershipNotFoundException(SubjectDescriptor subjectDescriptor, SubjectDescriptor 
containerDescriptor) + : base(GraphResources.GraphMembershipNotFound(subjectDescriptor.ToString(), containerDescriptor.ToString())) + { + } + } + + [Serializable] + public class GraphApiUnavailableException : GraphException + { + protected GraphApiUnavailableException() + { + } + + public GraphApiUnavailableException(string message) + : base(message) + { + } + + public GraphApiUnavailableException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected GraphApiUnavailableException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + public GraphApiUnavailableException(SubjectDescriptor subjectDescriptor) + : base(IdentityResources.IdentityNotFoundWithDescriptor(subjectDescriptor.SubjectType, subjectDescriptor.Identifier)) + { + } + + public GraphApiUnavailableException(Guid id) + : base(IdentityResources.IdentityNotFoundWithTfid(id)) + { + } + } + + #endregion + + [Serializable] + public class GraphProviderInfoApiUnavailableException : GraphException + { + protected GraphProviderInfoApiUnavailableException() + { + } + + public GraphProviderInfoApiUnavailableException(string message) + : base(message) + { + } + + public GraphProviderInfoApiUnavailableException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected GraphProviderInfoApiUnavailableException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + public GraphProviderInfoApiUnavailableException(SubjectDescriptor subjectDescriptor) + : base(IdentityResources.IdentityNotFoundWithDescriptor(subjectDescriptor.SubjectType, subjectDescriptor.Identifier)) + { + } + + public GraphProviderInfoApiUnavailableException(Guid id) + : base(IdentityResources.IdentityNotFoundWithTfid(id)) + { + } + } + + [Serializable] + public class SubjectDescriptorNotFoundException : GraphException + { + public SubjectDescriptorNotFoundException() + { } + + public SubjectDescriptorNotFoundException(string message) + : base(message) + { } + + public SubjectDescriptorNotFoundException(string message, Exception innerException) + : base(message, innerException) + { } + + protected SubjectDescriptorNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { } + + public SubjectDescriptorNotFoundException(Guid storageKey) + : base(GraphResources.SubjectDescriptorNotFoundWithStorageKey(storageKey)) + { + } + + public SubjectDescriptorNotFoundException(IdentityDescriptor identityDescriptor) + : base(GraphResources.SubjectDescriptorNotFoundWithIdentityDescriptor(identityDescriptor)) + { + } + } + + [Serializable] + public class StorageKeyNotFoundException : GraphException + { + public StorageKeyNotFoundException() + { } + + public StorageKeyNotFoundException(string message) + : base(message) + { } + + public StorageKeyNotFoundException(string message, Exception innerException) + : base(message, innerException) + { } + + protected StorageKeyNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { } + + public StorageKeyNotFoundException(SubjectDescriptor descriptor) + : base(GraphResources.StorageKeyNotFound(descriptor)) + { + } + } + + [Serializable] + public class InvalidGetDescriptorRequestException : GraphException + { + public InvalidGetDescriptorRequestException() + { } + + public InvalidGetDescriptorRequestException(string message) + : base(message) + { } + + public InvalidGetDescriptorRequestException(string message, Exception innerException) 
+ : base(message, innerException) + { } + + protected InvalidGetDescriptorRequestException(SerializationInfo info, StreamingContext context) + : base(info, context) + { } + + public InvalidGetDescriptorRequestException(Guid id) + : base(IdentityResources.InvalidGetDescriptorRequestWithLocalId(id)) + { + } + } + + [Serializable] + public class TooManyRequestedItemsException : GraphException + { + /// <summary> + /// Gets the count of the requested items. + /// Note: the value can be null based on whether the message discloses the limit. + /// </summary> + [DataMember] + public int? RequestedCount { get; set; } + + /// <summary> + /// Gets the max limit for the requested items. + /// Note: the value can be null based on whether the message discloses the limit. + /// </summary> + [DataMember] + public int? MaxLimit { get; set; } + + public TooManyRequestedItemsException() + : base(IdentityResources.TooManyRequestedItemsError()) + { } + + public TooManyRequestedItemsException(string message) + : base(message) + { } + + public TooManyRequestedItemsException(string message, Exception innerException) + : base(message, innerException) + { } + + protected TooManyRequestedItemsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { } + + public TooManyRequestedItemsException(int providedCount, int maxCount) + : base(IdentityResources.TooManyRequestedItemsErrorWithCount(providedCount, maxCount)) + { + this.RequestedCount = providedCount; + this.MaxLimit = maxCount; + } + } + + [Serializable] + public class InvalidGraphRequestException : GraphException + { + public InvalidGraphRequestException() + { } + + public InvalidGraphRequestException(string message) + : base(message) + { } + + public InvalidGraphRequestException(string message, Exception innerException) + : base(message, innerException) + { } + + protected InvalidGraphRequestException(SerializationInfo info, StreamingContext context) + : base(info, context) + { } + } + + [Serializable] + public class CannotEditChildrenOfNonGroupException : GraphException + { + public CannotEditChildrenOfNonGroupException() + { } + + public CannotEditChildrenOfNonGroupException(string message) + : base(message) + { } + + public CannotEditChildrenOfNonGroupException(string message, Exception innerException) + : base(message, innerException) + { } + + protected CannotEditChildrenOfNonGroupException(SerializationInfo info, StreamingContext context) + : base(info, context) + { } + + public CannotEditChildrenOfNonGroupException(SubjectDescriptor subjectDescriptor) + : base(GraphResources.CannotEditChildrenOfNonGroup(subjectDescriptor.ToString())) + { + } + } + + [Serializable] + public class InvalidSubjectTypeException : GraphException + { + public InvalidSubjectTypeException() + { } + + public InvalidSubjectTypeException(string message) + : base(message) + { } + + public InvalidSubjectTypeException(string message, Exception innerException) + : base(message, innerException) + { } + + protected InvalidSubjectTypeException(SerializationInfo info, StreamingContext context) + : base(info, context) + { } + } + + [Serializable] + public class GraphAccountNameCollisionRepairUnsafeException : GraphException + { + public GraphAccountNameCollisionRepairUnsafeException() + { } + + public GraphAccountNameCollisionRepairUnsafeException(string message) + : base(message) + { } + + public GraphAccountNameCollisionRepairUnsafeException(string message, Exception innerException) + : base(message, innerException) + { } + + protected GraphAccountNameCollisionRepairUnsafeException(SerializationInfo info, 
StreamingContext context) + : base(info, context) + { } + } + + [Serializable] + public class GraphAccountNameCollisionRepairFailedException : GraphException + { + public GraphAccountNameCollisionRepairFailedException() + { } + + public GraphAccountNameCollisionRepairFailedException(string message) + : base(message) + { } + + public GraphAccountNameCollisionRepairFailedException(string message, Exception innerException) + : base(message, innerException) + { } + + protected GraphAccountNameCollisionRepairFailedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { } + } + + [Serializable] + public class CannotUpdateWellKnownGraphGroupException : GraphException + { + public CannotUpdateWellKnownGraphGroupException() + { } + + public CannotUpdateWellKnownGraphGroupException(string message) + : base(message) + { } + + public CannotUpdateWellKnownGraphGroupException(string message, Exception innerException) + : base(message, innerException) + { } + + protected CannotUpdateWellKnownGraphGroupException(SerializationInfo info, StreamingContext context) + : base(info, context) + { } + } +} diff --git a/src/Sdk/WebApi/WebApi/Exceptions/IdentityExceptions.cs b/src/Sdk/WebApi/WebApi/Exceptions/IdentityExceptions.cs new file mode 100644 index 00000000000..12db542e72f --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Exceptions/IdentityExceptions.cs @@ -0,0 +1,1533 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Globalization; +using System.Runtime.Serialization; +using System.Text; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Services.Identity +{ + [Serializable] + [ExceptionMapping("0.0", "3.0", "IdentityServiceException", "GitHub.Services.Identity.IdentityServiceException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityServiceException : VssServiceException + { + public IdentityServiceException() + { + EventId = VssEventId.VssIdentityServiceException; + } + + public IdentityServiceException(string message) + : base(message) + { + EventId = VssEventId.VssIdentityServiceException; + } + + public IdentityServiceException(string message, Exception innerException) + : base(message, innerException) + { + EventId = VssEventId.VssIdentityServiceException; + } + + protected IdentityServiceException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + EventId = VssEventId.VssIdentityServiceException; + } + } + /// + /// The group you are creating already exists, thrown by the data tier + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "GroupCreationException", "GitHub.Services.Identity.GroupCreationException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class GroupCreationException : IdentityServiceException + { + public GroupCreationException(string displayName, string projectName) + : base(IdentityResources.GROUPCREATIONERROR(displayName, projectName)) + { + } + + public GroupCreationException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + /// + /// IMS domain is incorrect for operation + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentityDomainMismatchException", 
"GitHub.Services.Identity.IdentityDomainMismatchException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityDomainMismatchException : IdentityServiceException + { + public IdentityDomainMismatchException(string incorrectHost, string correctHost) + : base(IdentityResources.IDENTITYDOMAINMISMATCHERROR(incorrectHost, correctHost)) + { + } + + public IdentityDomainMismatchException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected IdentityDomainMismatchException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + /// + /// You are trying to add a group that is a parent group of the current group, throw + /// by the data tier + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "AddMemberCyclicMembershipException", "GitHub.Services.Identity.AddMemberCyclicMembershipException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class AddMemberCyclicMembershipException : IdentityServiceException + { + public AddMemberCyclicMembershipException() + { + } + + public AddMemberCyclicMembershipException(string groupName, string memberName) + : base(IdentityResources.ADDMEMBERCYCLICMEMBERSHIPERROR(groupName, memberName)) + { + } + + public AddMemberCyclicMembershipException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected AddMemberCyclicMembershipException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + /// + /// You are trying to create a group scope that already exists + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "GroupScopeCreationException", "GitHub.Services.Identity.GroupScopeCreationException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class GroupScopeCreationException : IdentityServiceException + { + public GroupScopeCreationException() + { + } + + public GroupScopeCreationException(String message, Exception innerException) + : base(message, innerException) + { + } + + public GroupScopeCreationException(string scopeId) + : base(IdentityResources.GROUPSCOPECREATIONERROR(scopeId)) + { + } + + protected GroupScopeCreationException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + /// + /// Group cannot be created in the requested scope since the requested scope is not within the root scope. + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + public class IncompatibleScopeException : IdentityServiceException + { + public IncompatibleScopeException() + { + } + + public IncompatibleScopeException(String message): base(message) + { + } + public IncompatibleScopeException(String message, Exception innerException) + : base(message, innerException) + { + } + + public IncompatibleScopeException(string rootScopeId, string scopeIdToCheck) + : base(IdentityResources.IncompatibleScopeError(rootScopeId, scopeIdToCheck)) + { + } + + protected IncompatibleScopeException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + /// + /// Trying to add a member to a group that is already a member of the group, thrown by the data tier. 
+ /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "AddMemberIdentityAlreadyMemberException", "GitHub.Services.Identity.AddMemberIdentityAlreadyMemberException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class AddMemberIdentityAlreadyMemberException : IdentityServiceException + { + public AddMemberIdentityAlreadyMemberException(string groupName, string memberName) + : base(IdentityResources.ADDMEMBERIDENTITYALREADYMEMBERERROR(groupName, memberName)) + { + } + + public AddMemberIdentityAlreadyMemberException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected AddMemberIdentityAlreadyMemberException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "RemoveAccountOwnerFromAdminGroupException", "GitHub.Services.Identity.RemoveAccountOwnerFromAdminGroupException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class RemoveAccountOwnerFromAdminGroupException : IdentityServiceException + { + public RemoveAccountOwnerFromAdminGroupException() + : base(IdentityResources.AccountOwnerCannotBeRemovedFromGroup(IdentityResources.ProjectCollectionAdministrators())) { } + + public RemoveAccountOwnerFromAdminGroupException(string message) : base(message){ } + + public RemoveAccountOwnerFromAdminGroupException(string message, Exception innerException) : base(message, innerException) { } + } + + /// + /// You can't remove yourself from the global namespace admins group and lock yourself out of your collection/hosting account. 
+ /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "RemoveSelfFromAdminGroupException", "GitHub.Services.Identity.RemoveSelfFromAdminGroupException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class RemoveSelfFromAdminGroupException : IdentityServiceException + { + public RemoveSelfFromAdminGroupException() + : base(IdentityResources.RemoveSelfFromAdminGroupError(BlockRemovingSelfFromAdminGroup)) + { + } + + public RemoveSelfFromAdminGroupException(String message, Exception innerException) + : base(message, innerException) + { + } + + private const String BlockRemovingSelfFromAdminGroup = @"/Service/Integration/Settings/BlockRemovingSelfFromAdminGroup"; + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "RemoveOrganizationAdminFromAdminGroupException", "GitHub.Services.Identity.RemoveOrganizationAdminFromAdminGroupException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + class RemoveOrganizationAdminFromAdminGroupException : IdentityServiceException + { + public RemoveOrganizationAdminFromAdminGroupException(string message) : base(message) { } + + public RemoveOrganizationAdminFromAdminGroupException(String message, Exception innerException) : base(message, innerException) { } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "RemoveServiceAccountsFromAdminGroupException", "GitHub.Services.Identity.RemoveServiceAccountsFromAdminGroupException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + class RemoveServiceAccountsFromAdminGroupException : IdentityServiceException + { + public RemoveServiceAccountsFromAdminGroupException(string message) : base(message) { } + public RemoveServiceAccountsFromAdminGroupException(String message, Exception innerException) : base(message, innerException) { } + } + + /// + /// Group member you are trying to delete was not a member of the group. 
+ /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "RemoveGroupMemberNotMemberException", "GitHub.Services.Identity.RemoveGroupMemberNotMemberException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class RemoveGroupMemberNotMemberException : IdentityServiceException + { + public RemoveGroupMemberNotMemberException(string sid) + : base(IdentityResources.REMOVEGROUPMEMBERNOTMEMBERERROR(sid)) + { + } + + public RemoveGroupMemberNotMemberException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + /// + /// Thrown when an AddMemberToGroup call is made to put an identity X into group Y, but the action + /// is not legal for some reason related to identity X + /// + [Serializable] + [ExceptionMapping("0.0", "3.0", "AddGroupMemberIllegalMemberException", "GitHub.Services.Identity.AddGroupMemberIllegalMemberException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class AddGroupMemberIllegalMemberException : IdentityServiceException + { + public AddGroupMemberIllegalMemberException() + { + } + + public AddGroupMemberIllegalMemberException(String message) + : base(message) + { + } + + public AddGroupMemberIllegalMemberException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected AddGroupMemberIllegalMemberException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + /// + /// Cannot add windows identity to hosted deployment + /// + [Serializable] + [ExceptionMapping("0.0", "3.0", "AddGroupMemberIllegalWindowsIdentityException", "GitHub.Services.Identity.AddGroupMemberIllegalWindowsIdentityException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class AddGroupMemberIllegalWindowsIdentityException : IdentityServiceException + { + public AddGroupMemberIllegalWindowsIdentityException(Identity member) + : base(IdentityResources.ADDGROUPMEMBERILLEGALWINDOWSIDENTITY(member.DisplayName)) + { + } + + public AddGroupMemberIllegalWindowsIdentityException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + /// + /// Cannot add internet identity to on premise deployment + /// + [Serializable] + [ExceptionMapping("0.0", "3.0", "AddGroupMemberIllegalInternetIdentityException", "GitHub.Services.Identity.AddGroupMemberIllegalInternetIdentityException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class AddGroupMemberIllegalInternetIdentityException : IdentityServiceException + { + public AddGroupMemberIllegalInternetIdentityException(Identity member) + : base(IdentityResources.ADDGROUPMEMBERILLEGALINTERNETIDENTITY(member.DisplayName)) + { + } + + public AddGroupMemberIllegalInternetIdentityException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + /// + /// Trying to remove a group that doesn't exist, thrown by the data tier + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "RemoveNonexistentGroupException", "GitHub.Services.Identity.RemoveNonexistentGroupException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class 
RemoveNonexistentGroupException : IdentityServiceException + { + public RemoveNonexistentGroupException(string sid) + : base(IdentityResources.REMOVENONEXISTENTGROUPERROR(sid)) + { + } + + public RemoveNonexistentGroupException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + /// + /// You can't remove any of the special groups: the global administrators group, the + /// service users group, the team foundation valid users group, or a project administration + /// group. Thrown by the data tier. + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "RemoveSpecialGroupException", "GitHub.Services.Identity.RemoveSpecialGroupException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class RemoveSpecialGroupException : IdentityServiceException + { + public RemoveSpecialGroupException(string sid, SpecialGroupType specialType) + : base(BuildMessage(sid, specialType)) + { + } + + public RemoveSpecialGroupException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected static string BuildMessage(string sid, SpecialGroupType specialType) + { + switch (specialType) + { + case SpecialGroupType.AdministrativeApplicationGroup: + return IdentityResources.REMOVEADMINGROUPERROR(); + + case SpecialGroupType.EveryoneApplicationGroup: + return IdentityResources.REMOVEEVERYONEGROUPERROR(); + + case SpecialGroupType.ServiceApplicationGroup: + return IdentityResources.REMOVESERVICEGROUPERROR(); + + default: + return IdentityResources.REMOVESPECIALGROUPERROR(); + } + } + } + + /// + /// Group you were looking up does not exist, thrown by the data tier + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "FindGroupSidDoesNotExistException", "GitHub.Services.Identity.FindGroupSidDoesNotExistException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class FindGroupSidDoesNotExistException : IdentityServiceException + { + public FindGroupSidDoesNotExistException(string sid) + : base(IdentityResources.FINDGROUPSIDDOESNOTEXISTERROR(sid)) + { + } + + public FindGroupSidDoesNotExistException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected FindGroupSidDoesNotExistException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + /// + /// Group rename error, new name already in use + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "GroupRenameException", "GitHub.Services.Identity.GroupRenameException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class GroupRenameException : IdentityServiceException + { + public GroupRenameException(string displayName) + : base(IdentityResources.GROUPRENAMEERROR(displayName)) + { + } + + public GroupRenameException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + /// + /// You cannot add a project group to a project group in a different project + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "AddProjectGroupProjectMismatchException", 
"GitHub.Services.Identity.AddProjectGroupProjectMismatchException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class AddProjectGroupProjectMismatchException : IdentityServiceException + { + public AddProjectGroupProjectMismatchException() + { + } + + public AddProjectGroupProjectMismatchException(string groupName, string memberName) + : base(IdentityResources.ADDPROJECTGROUPTPROJECTMISMATCHERROR(groupName, memberName)) + { + } + + public AddProjectGroupProjectMismatchException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected AddProjectGroupProjectMismatchException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "AddProjectGroupToGlobalGroupException", "GitHub.Services.Identity.AddProjectGroupToGlobalGroupException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class AddProjectGroupToGlobalGroupException : IdentityServiceException + { + public AddProjectGroupToGlobalGroupException() + { + } + + public AddProjectGroupToGlobalGroupException(string globalGroupName, string projectGroupName) + : base(IdentityResources.ADDPROJECTGROUPTOGLOBALGROUPERROR(projectGroupName, globalGroupName)) + { + } + + public AddProjectGroupToGlobalGroupException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected AddProjectGroupToGlobalGroupException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + /// + /// Unable to locate project for the project uri passed in + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "GroupScopeDoesNotExistException", "GitHub.Services.Identity.GroupScopeDoesNotExistException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class GroupScopeDoesNotExistException : IdentityServiceException + { + [SuppressMessage("Microsoft.Design", "CA1054:UriParametersShouldNotBeStrings", MessageId = "0#")] + public GroupScopeDoesNotExistException(string projectUri) + : base(IdentityResources.GROUPSCOPEDOESNOTEXISTERROR(projectUri)) + { + } + + public GroupScopeDoesNotExistException(Guid scopeId) + : base(IdentityResources.GROUPSCOPEDOESNOTEXISTERROR(scopeId)) + { + } + + public GroupScopeDoesNotExistException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected GroupScopeDoesNotExistException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + /// + /// This exception is thrown when a user tries to add a group that is + /// not an application group. We do not modify the memberships of Windows groups. 
+ /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "NotApplicationGroupException", "GitHub.Services.Identity.NotApplicationGroupException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class NotApplicationGroupException : IdentityServiceException + { + public NotApplicationGroupException() + : base(IdentityResources.NOT_APPLICATION_GROUP()) + { + } + + public NotApplicationGroupException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + /// + /// You must specify a group when removing members from a group, thrown by the app tier + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ModifyEveryoneGroupException", "GitHub.Services.Identity.ModifyEveryoneGroupException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ModifyEveryoneGroupException : IdentityServiceException + { + public ModifyEveryoneGroupException() + : base(IdentityResources.MODIFYEVERYONEGROUPEXCEPTION()) + { + } + + public ModifyEveryoneGroupException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + /// + /// ReadIdentityFromSource returned null and we need an identity to continue the operation + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentityNotFoundException", "GitHub.Services.Identity.IdentityNotFoundException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityNotFoundException : IdentityServiceException + { + public IdentityNotFoundException() + : base(IdentityResources.IdentityNotFoundSimpleMessage()) + { + } + + public IdentityNotFoundException(String message) + : base(message) + { + } + + public IdentityNotFoundException(String message, Exception innerException) + : base(message, innerException) + { + } + + public IdentityNotFoundException(IdentityDescriptor descriptor) + : base(IdentityResources.IdentityNotFoundMessage(descriptor.IdentityType)) + { + } + + public IdentityNotFoundException(SubjectDescriptor subjectDescriptor) + : base(IdentityResources.IdentityNotFoundMessage(subjectDescriptor.SubjectType)) + { + } + + public IdentityNotFoundException(Guid tfid) + : base(IdentityResources.IdentityNotFoundWithTfid(tfid)) + { + } + } + + /// + /// Identity is not part of calling identity's directory + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + public class IdentityNotFoundInCurrentDirectoryException : IdentityServiceException + { + public IdentityNotFoundInCurrentDirectoryException() + : base(IdentityResources.IdentityNotFoundInCurrentDirectory()) + { + } + + public IdentityNotFoundInCurrentDirectoryException(String message) + : base(message) + { + } + + public IdentityNotFoundInCurrentDirectoryException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + /// + /// The identity is not a service identity + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentityNotServiceIdentityException", "GitHub.Services.Identity.IdentityNotServiceIdentityException, 
GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityNotServiceIdentityException : IdentityServiceException + { + public IdentityNotServiceIdentityException(String message) + : base(message) + { + } + + public IdentityNotServiceIdentityException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidServiceIdentityNameException", "GitHub.Services.Identity.InvalidServiceIdentityNameException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidServiceIdentityNameException : IdentityServiceException + { + public InvalidServiceIdentityNameException(String identityName) + : base(IdentityResources.InvalidServiceIdentityName(identityName)) + { + } + + public InvalidServiceIdentityNameException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + /// + /// The identity already exists + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentityAlreadyExistsException", "GitHub.Services.Identity.IdentityAlreadyExistsException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityAlreadyExistsException : IdentityServiceException + { + public IdentityAlreadyExistsException(String message) + : base(message) + { + } + + public IdentityAlreadyExistsException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + /// + /// This exception is thrown when a user tries to add a distribution list + /// to a group. We only allow security groups to be used. 
+ /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "NotASecurityGroupException", "GitHub.Services.Identity.NotASecurityGroupException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class NotASecurityGroupException : IdentityServiceException + { + public NotASecurityGroupException(String displayName) + : base(IdentityResources.NOT_A_SECURITY_GROUP(displayName)) + { + } + + public NotASecurityGroupException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "RemoveMemberServiceAccountException", "GitHub.Services.Identity.RemoveMemberServiceAccountException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class RemoveMemberServiceAccountException : IdentityServiceException + { + public RemoveMemberServiceAccountException() + : base(IdentityResources.CANNOT_REMOVE_SERVICE_ACCOUNT()) + { + } + + public RemoveMemberServiceAccountException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IllegalAliasException", "GitHub.Services.Identity.IllegalAliasException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IllegalAliasException : IdentityServiceException + { + public IllegalAliasException(string name) : + base(name) + { + } + + public IllegalAliasException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IllegalIdentityException", "GitHub.Services.Identity.IllegalIdentityException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IllegalIdentityException : IdentityServiceException + { + public IllegalIdentityException(string name) : + base(IdentityResources.IllegalIdentityException(name)) + { + } + + public IllegalIdentityException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentitySyncException", "GitHub.Services.Identity.IdentitySyncException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentitySyncException : IdentityServiceException + { + public IdentitySyncException(string message, Exception innerException) : + base(IdentityResources.IDENTITY_SYNC_ERROR(message)) + { + } + } + + /// + /// Identity provider not available + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentityProviderUnavailableException", "GitHub.Services.Identity.IdentityProviderUnavailableException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityProviderUnavailableException : IdentityServiceException + { + public 
IdentityProviderUnavailableException(IdentityDescriptor descriptor) + : base(IdentityResources.IdentityProviderUnavailable(descriptor.IdentityType, descriptor.Identifier)) + { + } + + public IdentityProviderUnavailableException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentityPropertyRequiredException", "GitHub.Services.Identity.IdentityPropertyRequiredException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityPropertyRequiredException : IdentityServiceException + { + public IdentityPropertyRequiredException(String message) + : base(message) + { + } + + public IdentityPropertyRequiredException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentityAccountNameAlreadyInUseException", "GitHub.Services.Identity.IdentityAccountNameAlreadyInUseException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityAccountNameAlreadyInUseException : IdentityServiceException + { + public IdentityAccountNameAlreadyInUseException(String oneAccountName, Int32 collisionCount) + : base(BuildExceptionMessage(oneAccountName, collisionCount)) + { + } + + public IdentityAccountNameAlreadyInUseException(String message, Exception innerException) + : base(message, innerException) + { + } + + private static String BuildExceptionMessage(String oneAccountName, Int32 collisionCount) + { + Debug.Assert(collisionCount > 0, "identity account name exception fired, but no collisions were found"); + + if (collisionCount == 1) + { + return IdentityResources.IdentityAccountNameAlreadyInUseError(oneAccountName); + } + else + { + return IdentityResources.IdentityAccountNamesAlreadyInUseError(collisionCount, oneAccountName); + } + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentityAccountNameCollisionRepairFailedException", "GitHub.Services.Identity.IdentityAccountNameCollisionRepairFailedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityAccountNameCollisionRepairFailedException : IdentityServiceException + { + public IdentityAccountNameCollisionRepairFailedException(String accountName) + : base(IdentityResources.IdentityAccountNameCollisionRepairFailedError(accountName)) + { + } + + public IdentityAccountNameCollisionRepairFailedException(String accountName, Exception innerException) + : base(IdentityResources.IdentityAccountNameCollisionRepairFailedError(accountName), innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentityAccountNameCollisionRepairUnsafeException", "GitHub.Services.Identity.IdentityAccountNameCollisionRepairUnsafeException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityAccountNameCollisionRepairUnsafeException : IdentityServiceException + { + public IdentityAccountNameCollisionRepairUnsafeException(String accountName) + : 
base(IdentityResources.IdentityAccountNameCollisionRepairUnsafeError(accountName)) + { + } + + public IdentityAccountNameCollisionRepairUnsafeException(String accountName, Exception innerException) + : base(IdentityResources.IdentityAccountNameCollisionRepairUnsafeError(accountName), innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentityAliasAlreadyInUseException", "GitHub.Services.Identity.IdentityAliasAlreadyInUseException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityAliasAlreadyInUseException : IdentityServiceException + { + public IdentityAliasAlreadyInUseException(String conflictingAlias) + : base(IdentityResources.IdentityAliasAlreadyInUseError(conflictingAlias)) + { + } + + public IdentityAliasAlreadyInUseException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "DynamicIdentityTypeCreationNotSupportedException", "GitHub.Services.Identity.DynamicIdentityTypeCreationNotSupportedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DynamicIdentityTypeCreationNotSupportedException : IdentityServiceException + { + public DynamicIdentityTypeCreationNotSupportedException() + : base(IdentityResources.DynamicIdentityTypeCreationNotSupported()) + { + } + + public DynamicIdentityTypeCreationNotSupportedException(string message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "TooManyIdentitiesReturnedException", "GitHub.Services.Identity.TooManyIdentitiesReturnedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class TooManyIdentitiesReturnedException : IdentityServiceException + { + public TooManyIdentitiesReturnedException() + : base(IdentityResources.TooManyResultsError()) + { + } + + public TooManyIdentitiesReturnedException(string message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "MultipleIdentitiesFoundException", "GitHub.Services.Identity.MultipleIdentitiesFoundException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class MultipleIdentitiesFoundException : IdentityServiceException + { + public MultipleIdentitiesFoundException(string identityName, IEnumerable matchingIdentities) + : base(BuildExceptionMessage(identityName, matchingIdentities)) + { + + } + + public MultipleIdentitiesFoundException(string identityName, IEnumerable matchingIdentities) + : base(BuildExceptionMessage(identityName, matchingIdentities)) + { + + } + + public MultipleIdentitiesFoundException(string message, Exception innerException) + : base(message, innerException) + { + } + + private static string BuildExceptionMessage(string identityName, IEnumerable matchingIdentities) + { + StringBuilder builder = new StringBuilder(); + + foreach (var identity in matchingIdentities) + { + 
builder.AppendFormat(CultureInfo.CurrentUICulture, "- {0} ({1})", identity.ProviderDisplayName, identity.CustomDisplayName); + } + + return IdentityResources.MultipleIdentitiesFoundError(identityName, builder.ToString()); + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "HistoricalIdentityNotFoundException", "GitHub.Services.Identity.HistoricalIdentityNotFoundException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class HistoricalIdentityNotFoundException : IdentityServiceException + { + public HistoricalIdentityNotFoundException() + : base(IdentityResources.TooManyResultsError()) + { + } + + public HistoricalIdentityNotFoundException(string message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidIdentityIdTranslationException", "GitHub.Services.Identity.InvalidIdentityIdTranslationException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidIdentityIdTranslationException : IdentityServiceException + { + public InvalidIdentityIdTranslationException() + : base(IdentityResources.InvalidIdentityIdTranslations()) + { + } + + public InvalidIdentityIdTranslationException(string message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdTranslationsAreMigratedException", "GitHub.Services.Identity.IdTranslationsAreMigratedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdTranslationsAreMigratedException : IdentityServiceException + { + public IdTranslationsAreMigratedException() + : base(IdentityResources.IdentityIdTranslationsAreMigrated()) + { + } + + public IdTranslationsAreMigratedException(string message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidIdentityStorageKeyTranslationException", "GitHub.Services.Identity.InvalidIdentityStorageKeyTranslationException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidIdentityStorageKeyTranslationException : IdentityServiceException + { + public InvalidIdentityStorageKeyTranslationException() + : base(IdentityResources.InvalidIdentityKeyMaps()) + { + } + + public InvalidIdentityStorageKeyTranslationException(string message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidIdentityKeyMapsException", "GitHub.Services.Identity.InvalidIdentityKeyMapsException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidIdentityKeyMapException : IdentityServiceException + { + public InvalidIdentityKeyMapException() + : base(IdentityResources.InvalidIdentityKeyMaps()) + { + } + + public InvalidIdentityKeyMapException(string message, Exception innerException) + : 
base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidTypeIdForIdentityStorageKeyException", "GitHub.Services.Identity.InvalidTypeIdForIdentityStorageKeyException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidTypeIdForIdentityKeyMapException : IdentityServiceException + { + public InvalidTypeIdForIdentityKeyMapException() + : base(IdentityResources.InvalidIdentityKeyMaps()) + { + } + + public InvalidTypeIdForIdentityKeyMapException(string message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "DuplicateIdentitiesFoundException", "GitHub.Services.Identity.DuplicateIdentitiesFoundException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DuplicateIdentitiesFoundException : IdentityServiceException + { + public DuplicateIdentitiesFoundException() + : base(IdentityResources.InvalidIdentityIdTranslations()) + { + } + + public DuplicateIdentitiesFoundException(String message) + : base(message) + { + } + + public DuplicateIdentitiesFoundException(string message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentityExpressionException", "GitHub.Services.Identity.IdentityExpressionException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityExpressionException : IdentityServiceException + { + public IdentityExpressionException(String message) + : base(message) + { + } + + public IdentityExpressionException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidDisplayNameException", "GitHub.Services.Identity.InvalidDisplayNameException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidDisplayNameException : IdentityServiceException + { + public InvalidDisplayNameException(String message) + : base(message) + { + } + + public InvalidDisplayNameException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "GroupNameNotRecognizedException", "GitHub.Services.Identity.GroupNameNotRecognizedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class GroupNameNotRecognizedException : IdentityServiceException + { + public GroupNameNotRecognizedException() + { + } + + public GroupNameNotRecognizedException(string message, Exception innerException) + : base(message, innerException) + { + } + + public GroupNameNotRecognizedException(string groupName) + : this(IdentityResources.InvalidNameNotRecognized(groupName), null) + { + } + + protected GroupNameNotRecognizedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", 
"CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "AccountPreferencesAlreadyExistException", "GitHub.Services.Identity.AccountPreferencesAlreadyExistException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class AccountPreferencesAlreadyExistException : IdentityServiceException + { + public AccountPreferencesAlreadyExistException() + : base(IdentityResources.AccountPreferencesAlreadyExist()) + { + } + + public AccountPreferencesAlreadyExistException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "IdentityMapReadOnlyException", "GitHub.Services.Identity.IdentityMapReadOnlyException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityMapReadOnlyException : IdentityServiceException + { + public IdentityMapReadOnlyException() + : this((Exception)null) + { + } + + public IdentityMapReadOnlyException(Exception innerException) + : base(IdentityResources.IdentityMapReadOnlyException(), innerException) + { + } + + public IdentityMapReadOnlyException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected IdentityMapReadOnlyException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "IdentityStoreNotAvailableException", "GitHub.Services.Identity.IdentityStoreNotAvailableException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityStoreNotAvailableException : IdentityServiceException + { + public IdentityStoreNotAvailableException() : base() { } + public IdentityStoreNotAvailableException(string errorMessage) : base(errorMessage) { } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidDisplayNameException", "GitHub.Services.Identity.InvalidChangedIdentityException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidChangedIdentityException : IdentityServiceException + { + public InvalidChangedIdentityException(String message) + : base(message) + { + } + + public InvalidChangedIdentityException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdenittyInvalidTypeIdException", "GitHub.Services.Identity.IdenittyInvalidTypeIdException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + [Obsolete("This exception has been renamed to IdentityInvalidTypeIdException")] + public class IdenittyInvalidTypeIdException : IdentityServiceException + { + public IdenittyInvalidTypeIdException(string message) : + base(message) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "IdentityInvalidTypeIdException", "GitHub.Services.Identity.IdentityInvalidTypeIdException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] +#pragma warning disable 618 + public class IdentityInvalidTypeIdException : IdenittyInvalidTypeIdException +#pragma warning restore 
618 + { + public IdentityInvalidTypeIdException(string message) : + base(message) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidIdentityKeyException", "GitHub.Services.Identity.InvalidIdentityKeyException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidIdentityKeyException : IdentityServiceException + { + public InvalidIdentityKeyException() : base() { } + + public InvalidIdentityKeyException(string message) : + base(message) + { + } + } + + [Serializable] + [ExceptionMapping("0.0", "3.0", "IdentityMaterializationFailedException", "GitHub.Services.Identity.IdentityMaterializationFailedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class IdentityMaterializationFailedException : IdentityServiceException + { + public IdentityMaterializationFailedException() + { + } + + public IdentityMaterializationFailedException(string message, Exception innerException) + : base(message, innerException) + { + } + + public IdentityMaterializationFailedException(string principalName) + : this(IdentityResources.IdentityMaterializationFailedMessage(principalName), null) + { + } + + protected IdentityMaterializationFailedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class IdentityDescriptorNotFoundException : IdentityServiceException + { + public IdentityDescriptorNotFoundException() + { } + + public IdentityDescriptorNotFoundException(string message) + : base(message) + { } + + public IdentityDescriptorNotFoundException(string message, Exception innerException) + : base(message, innerException) + { } + + protected IdentityDescriptorNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { } + + public IdentityDescriptorNotFoundException(Guid id, bool isMasterId) + : base(isMasterId ? 
+ IdentityResources.IdentityDescriptorNotFoundWithMasterId(id) : + IdentityResources.IdentityDescriptorNotFoundWithLocalId(id)) + { + } + } + + [Serializable] + public abstract class TenantSwitchException : IdentityServiceException + { + public TenantSwitchException() + { + } + + public TenantSwitchException(string message) : base(message) + { + } + + public TenantSwitchException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected TenantSwitchException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class InvitationPendingException : TenantSwitchException + { + public string AccountName { get; } + public string OrganizationName { get; } + + public InvitationPendingException() + { + } + + public InvitationPendingException(string message) + : base(message) + { + } + + public InvitationPendingException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected InvitationPendingException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + public InvitationPendingException(string accountName, string organizationName) + : base(IdentityResources.InvitationPendingMessage(accountName, organizationName)) + { + AccountName = accountName; + OrganizationName = organizationName; + } + } + + [Serializable] + public class WrongWorkOrPersonalException : TenantSwitchException + { + public string AccountName { get; } + public bool ShouldBePersonal { get; } + public bool ShouldCreatePersonal { get; } + + public WrongWorkOrPersonalException() + { + } + + public WrongWorkOrPersonalException(string message) + : base(message) + { + } + + public WrongWorkOrPersonalException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected WrongWorkOrPersonalException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + public WrongWorkOrPersonalException(string accountName, bool shouldBePersonal, bool shouldCreatePersonal) + : base(GetMessage(shouldBePersonal, shouldCreatePersonal)) + { + AccountName = accountName; + ShouldBePersonal = shouldBePersonal; + ShouldCreatePersonal = shouldCreatePersonal; + } + + private static string GetMessage(bool shouldBePersonal, bool shouldCreatePersonal) + { + if (shouldBePersonal) + { + if (shouldCreatePersonal) + { + return IdentityResources.ShouldCreatePersonalAccountMessage(); + } + else + { + return IdentityResources.ShouldBePersonalAccountMessage(); + } + } + else + { + return IdentityResources.ShouldBeWorkAccountMessage(); + } + } + } + + [Serializable] + public class InvalidTransferIdentityRightsRequestException : IdentityServiceException + { + public InvalidTransferIdentityRightsRequestException() + { + } + + public InvalidTransferIdentityRightsRequestException(string message) + : base(message) + { + } + + public InvalidTransferIdentityRightsRequestException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected InvalidTransferIdentityRightsRequestException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class FailedTransferIdentityRightsException : IdentityServiceException + { + public FailedTransferIdentityRightsException() + { + } + + public FailedTransferIdentityRightsException(string message) + : base(message) + { + } + + public FailedTransferIdentityRightsException(string message, Exception innerException) + : 
base(message, innerException) + { + } + + protected FailedTransferIdentityRightsException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class CollectionShardingException : IdentityServiceException + { + public CollectionShardingException() + { + } + + public CollectionShardingException(string message) + : base(message) + { + } + + public CollectionShardingException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected CollectionShardingException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + public class ScopeBadRequestException: IdentityServiceException + { + protected ScopeBadRequestException() + { + } + + public ScopeBadRequestException(string message) + : base(message) + { + } + + public ScopeBadRequestException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected ScopeBadRequestException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + /// + /// Indicates that a caller action triggered an attempt to read or update identity information at the deployment level + /// directly from (or using data from) a sharded host after dual writes had been disabled, meaning that the fallback is not allowed. + /// + [Serializable] + public class FallbackIdentityOperationNotAllowedException : IdentityServiceException + { + public FallbackIdentityOperationNotAllowedException() + { + } + + public FallbackIdentityOperationNotAllowedException(string message) + : base(message) + { + } + + public FallbackIdentityOperationNotAllowedException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected FallbackIdentityOperationNotAllowedException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + /// + /// Thrown when we were trying to create a client to talk to the legacy SPS identity store (e.g. SPS SU1), + /// but were not able to do so due to an unexpected response. 
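// --- Editorial sketch, not part of the patch above ---
// Most identity exceptions in this file follow the same shape: [Serializable], a message
// constructor, a message-plus-inner-exception constructor, and a protected serialization
// constructor. A hypothetical new subtype would look like the following; "SampleIdentityException"
// is an illustrative name, not a type from this SDK.
using System;
using System.Runtime.Serialization;

namespace GitHub.Services.Identity
{
    [Serializable]
    public class SampleIdentityException : IdentityServiceException
    {
        public SampleIdentityException(string message)
            : base(message)
        {
        }

        public SampleIdentityException(string message, Exception innerException)
            : base(message, innerException)
        {
        }

        protected SampleIdentityException(SerializationInfo info, StreamingContext context)
            : base(info, context)
        {
        }
    }
}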
+ /// + [Serializable] + public class CannotFindLegacySpsIdentityStoreException : IdentityServiceException + { + public CannotFindLegacySpsIdentityStoreException() + { + } + + public CannotFindLegacySpsIdentityStoreException(string message) + : base(message) + { + } + + public CannotFindLegacySpsIdentityStoreException(string message, Exception innerException) + : base(message, innerException) + { + } + + protected CannotFindLegacySpsIdentityStoreException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + /// + /// Unable to restore group scope + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "RestoreGroupScopeValidationException", "GitHub.Services.Identity.RestoreGroupScopeValidationException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class RestoreGroupScopeValidationException : IdentityServiceException + { + [SuppressMessage("Microsoft.Design", "CA1054:UriParametersShouldNotBeStrings", MessageId = "0#")] + public RestoreGroupScopeValidationException(string validationError) + : base(IdentityResources.RestoreGroupScopeValidationError(validationError)) + { + } + + public RestoreGroupScopeValidationException(String message, Exception innerException) + : base(message, innerException) + { + } + + protected RestoreGroupScopeValidationException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Exceptions/LocationExceptions.cs b/src/Sdk/WebApi/WebApi/Exceptions/LocationExceptions.cs new file mode 100644 index 00000000000..44b44cf2f88 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Exceptions/LocationExceptions.cs @@ -0,0 +1,155 @@ +using GitHub.Services.Common; +using System; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.Serialization; +using GitHub.Services.WebApi; + +namespace GitHub.Services.Location +{ + [Serializable] + [SuppressMessageAttribute("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ServiceDefinitionDoesNotExistException", "GitHub.Services.Location.ServiceDefinitionDoesNotExistException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public partial class ServiceDefinitionDoesNotExistException : VssServiceException + { + public ServiceDefinitionDoesNotExistException(String message) + : base(message) + { + } + + public ServiceDefinitionDoesNotExistException(String message, Exception ex) + : base(message, ex) + { + } + + protected ServiceDefinitionDoesNotExistException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [SuppressMessageAttribute("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidAccessPointException", "GitHub.Services.Location.InvalidAccessPointException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public partial class InvalidAccessPointException : VssServiceException + { + public InvalidAccessPointException(String message) + : base(message) + { + } + + public InvalidAccessPointException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidAccessPointException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + 
[SuppressMessageAttribute("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidServiceDefinitionException", "GitHub.Services.Location.InvalidServiceDefinitionException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public partial class InvalidServiceDefinitionException : VssServiceException + { + public InvalidServiceDefinitionException(String message) + : base(message) + { + } + + public InvalidServiceDefinitionException(String message, Exception ex) + : base(message, ex) + { + } + + protected InvalidServiceDefinitionException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [SuppressMessageAttribute("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "CannotChangeParentDefinitionException", "GitHub.Services.Location.CannotChangeParentDefinitionException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public partial class CannotChangeParentDefinitionException : VssServiceException + { + public CannotChangeParentDefinitionException() + { + } + + public CannotChangeParentDefinitionException(String message) + : base(GetMessage(message)) + { + } + + public CannotChangeParentDefinitionException(String message, Exception ex) + : base(GetMessage(message), ex) + { + } + + protected CannotChangeParentDefinitionException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + + private static String GetMessage(String message) + { +#if DEBUG + String helpText = " Are you trying to move an existing Resource Area to a different service? You will need to follow the steps at: https://vsowiki.com/index.php?title=Moving_an_existing_Resource_Area_to_a_different_Service"; + if (message != null && !message.EndsWith(helpText, StringComparison.Ordinal)) + { + message = String.Concat(message, helpText); + } +#endif + return message; + } + } + + [Serializable] + [SuppressMessageAttribute("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ParentDefinitionNotFoundException", "GitHub.Services.Location.ParentDefinitionNotFoundException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public partial class ParentDefinitionNotFoundException : VssServiceException + { + public ParentDefinitionNotFoundException(String serviceType, Guid identifier, String parentServiceType, Guid serviceInstance) + : this(LocationResources.ParentDefinitionNotFound(serviceType, identifier, parentServiceType, serviceInstance)) + { + } + + public ParentDefinitionNotFoundException(String message) + : base(message) + { + } + + public ParentDefinitionNotFoundException(String message, Exception ex) + : base(message, ex) + { + } + + protected ParentDefinitionNotFoundException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + [Serializable] + [SuppressMessageAttribute("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ActionDeniedBySubscriberException", "GitHub.Services.Location.ActionDeniedBySubscriberException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public partial class ActionDeniedBySubscriberException : VssServiceException + { + public ActionDeniedBySubscriberException(String message) + : base(message) + { 
+ } + + public ActionDeniedBySubscriberException(String message, Exception ex) + : base(message, ex) + { + } + + protected ActionDeniedBySubscriberException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Exceptions/SecurityExceptions.cs b/src/Sdk/WebApi/WebApi/Exceptions/SecurityExceptions.cs new file mode 100644 index 00000000000..2c67437d161 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Exceptions/SecurityExceptions.cs @@ -0,0 +1,158 @@ +using System; +using System.Diagnostics.CodeAnalysis; +using System.Reflection; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using GitHub.Services.Identity; + +namespace GitHub.Services.Security +{ + [Serializable] + [SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")] + [ExceptionMapping("0.0", "3.0", "SecurityException", "GitHub.Services.Security.SecurityException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public abstract class SecurityException : VssServiceException + { + public SecurityException(String message) + : base(message) + { + } + + public SecurityException(String message, Exception ex) + : base(message, ex) + { + } + } + + /// + /// An exception which is thrown when a permission check fails in the security service. + /// + [Serializable] + [ExceptionMapping("0.0", "3.0", "AccessCheckException", "GitHub.Framework.Server.AccessCheckException, GitHub.Framework.Server, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class AccessCheckException : SecurityException + { + /// + /// Constructs an AccessCheckException. + /// + /// The identity descriptor which was checked. + /// The display name of the identity which was checked. + /// The token which was checked. + /// The requested permissions, which were not satisifed by the check. + /// The security namespace which was checked. + /// A descriptive message for the exception. + public AccessCheckException( + IdentityDescriptor descriptor, + String identityDisplayName, + String token, + int requestedPermissions, + Guid namespaceId, + String message) + : this(descriptor, token, requestedPermissions, namespaceId, message) + { + this.IdentityDisplayName = identityDisplayName; + } + + /// + /// Constructs an AccessCheckException. + /// + /// The identity descriptor which was checked. + /// The token which was checked. + /// The requested permissions, which were not satisifed by the check. + /// The security namespace which was checked. + /// A descriptive message for the exception. + public AccessCheckException( + IdentityDescriptor descriptor, + String token, + int requestedPermissions, + Guid namespaceId, + String message) + : base(message) + { + ArgumentUtility.CheckForNull(descriptor, nameof(descriptor)); + ArgumentUtility.CheckForNull(token, nameof(token)); + ArgumentUtility.CheckForNull(message, nameof(message)); + + this.Descriptor = descriptor; + this.Token = token; + this.RequestedPermissions = requestedPermissions; + this.NamespaceId = namespaceId; + } + + public AccessCheckException(String message) + : base(message) + { + } + + /// + /// The identity descriptor which was checked. + /// + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public IdentityDescriptor Descriptor { get; private set; } + + /// + /// The display name of the identity which was checked. + /// This property may be null. 
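// --- Editorial sketch, not part of the patch above ---
// AccessCheckException carries everything needed to report a failed permission check: who was
// checked (descriptor / display name), against which token, which permission bits were demanded,
// and in which security namespace. The IdentityDescriptor(identityType, identifier) constructor
// is assumed from the surrounding SDK, and every concrete value below is made up for illustration.
using System;
using GitHub.Services.Identity;
using GitHub.Services.Security;

internal static class AccessCheckExceptionExample
{
    internal static AccessCheckException BuildDenied()
    {
        IdentityDescriptor descriptor =
            new IdentityDescriptor("System.Security.UnauthenticatedIdentity", "anonymous");

        return new AccessCheckException(
            descriptor,
            identityDisplayName: "Anonymous",
            token: "repoV2/sample-repo",
            requestedPermissions: 4,          // e.g. a "write" bit in the checked namespace
            namespaceId: Guid.NewGuid(),      // placeholder namespace id
            message: "Anonymous does not have permission 4 on token 'repoV2/sample-repo'.");
    }
}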
+ /// + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public String IdentityDisplayName { get; private set; } + + /// + /// The token which was checked. + /// + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public String Token { get; private set; } + + /// + /// The permissions which were demanded. + /// + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public int RequestedPermissions { get; private set; } + + /// + /// The identifier of the security namespace which was checked. + /// + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public Guid NamespaceId { get; private set; } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidAclStoreException", "GitHub.Services.Security.InvalidAclStoreException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidAclStoreException : SecurityException + { + public InvalidAclStoreException(Guid namespaceId, Guid aclStoreId) + : this(SecurityResources.InvalidAclStoreException(namespaceId, aclStoreId)) + { + } + + public InvalidAclStoreException(String message) + : base(message) + { + } + + public InvalidAclStoreException(String message, Exception ex) + : base(message, ex) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + public class InvalidPermissionsException : SecurityException + { + public InvalidPermissionsException(Guid namespaceId, Int32 bitmask) + : this(SecurityResources.InvalidPermissionsException(namespaceId, bitmask)) + { + } + + public InvalidPermissionsException(String message) + : base(message) + { + } + + public InvalidPermissionsException(String message, Exception ex) + : base(message, ex) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Exceptions/VssApiResourceExceptions.cs b/src/Sdk/WebApi/WebApi/Exceptions/VssApiResourceExceptions.cs new file mode 100644 index 00000000000..5200a2e3d7b --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Exceptions/VssApiResourceExceptions.cs @@ -0,0 +1,186 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using GitHub.Services.Common; + +namespace GitHub.Services.WebApi +{ + /// + /// Exception thrown when the requested API resource location was not found on the server + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "VssResourceNotFoundException", "GitHub.Services.WebApi.VssResourceNotFoundException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VssResourceNotFoundException : VssServiceException + { + public VssResourceNotFoundException(Guid locationId) + : this(WebApiResources.ResourceNotRegisteredMessage(locationId)) + { + } + + public VssResourceNotFoundException(Guid locationId, Uri serverBaseUri) + : this(WebApiResources.ResourceNotFoundOnServerMessage(locationId, serverBaseUri)) + { + } + + public VssResourceNotFoundException(String message) + : base(message) + { + } + } + + /// + /// Base exception class for api resource version exceptions + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "VssResourceVersionException", "GitHub.Services.WebApi.VssResourceVersionException, GitHub.Services.WebApi, Version=14.0.0.0, 
Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public abstract class VssResourceVersionException : VssServiceException + { + public VssResourceVersionException(String message) + : base(message) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "VssInvalidApiResourceVersionException", "GitHub.Services.WebApi.VssInvalidApiResourceVersionException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VssInvalidApiResourceVersionException : VssResourceVersionException + { + public VssInvalidApiResourceVersionException(String apiResourceVersionString) + : base(WebApiResources.InvalidApiVersionStringMessage(apiResourceVersionString)) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "VssApiResourceDuplicateIdException", "GitHub.Services.WebApi.VssApiResourceDuplicateIdException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VssApiResourceDuplicateIdException: VssApiRouteRegistrationException + { + public VssApiResourceDuplicateIdException(Guid locationId) + : base(WebApiResources.ApiResourceDuplicateIdMessage(locationId)) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "VssApiResourceDuplicateRouteNameException", "GitHub.Services.WebApi.VssApiResourceDuplicateRouteNameException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VssApiResourceDuplicateRouteNameException : VssApiRouteRegistrationException + { + public VssApiResourceDuplicateRouteNameException(string routeName) + : base(WebApiResources.ApiResourceDuplicateRouteNameMessage(routeName)) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "VssApiRouteRegistrationException", "GitHub.Services.WebApi.VssApiRouteRegistrationException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public abstract class VssApiRouteRegistrationException : VssResourceVersionException + { + public VssApiRouteRegistrationException(string message) + : base(message) + { + } + } + + /// + /// Exception thrown when the requested version of a resource is not supported on the server + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "VssVersionNotSupportedException", "GitHub.Services.WebApi.VssVersionNotSupportedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VssVersionNotSupportedException : VssResourceVersionException + { + public VssVersionNotSupportedException(ApiResourceLocation location, Version requestedVersion, Version minSupportedVersion, Uri serverBaseUri) + : base(WebApiResources.ClientResourceVersionNotSupported(location.Area + ":" + location.ResourceName + " " + location.Id, requestedVersion, serverBaseUri, minSupportedVersion)) + { + } + } + + /// + /// Exception thrown when the requested version of a resource is greater than the latest api version the server supports. 
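// --- Editorial sketch, not part of the patch above ---
// The version exceptions in this file guard api-version negotiation between client and server.
// The ApiResourceVersion(double, int) constructor and its ApiVersion / ResourceVersion members
// are taken from their use in FileContainerHttpClient later in this patch; the comparison below
// mirrors the gzip-support check there and is illustrative only.
using GitHub.Services.WebApi;

internal static class ApiResourceVersionExample
{
    internal static bool SupportsFeature(ApiResourceVersion negotiated)
    {
        // Feature introduced with api version 1.0, resource version 2.
        ApiResourceVersion required = new ApiResourceVersion(1.0, 2);

        return negotiated.ApiVersion > required.ApiVersion
            || (negotiated.ApiVersion == required.ApiVersion
                && negotiated.ResourceVersion >= required.ResourceVersion);
    }
}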
+ /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + public class VssVersionOutOfRangeException : VssResourceVersionException + { + public VssVersionOutOfRangeException(Version requestedVersion, Version maxSupportedVersion) + : base(WebApiResources.ApiVersionOutOfRange(requestedVersion, maxSupportedVersion)) + { + } + + public VssVersionOutOfRangeException(ApiResourceVersion requestedApiVersion, string routeMatchedExceptVersion) + : base(WebApiResources.ApiVersionOutOfRangeForRoute(requestedApiVersion, routeMatchedExceptVersion)) + { + } + + public VssVersionOutOfRangeException(ApiResourceVersion requestedApiVersion, IEnumerable routesMatchedExceptVersion) + : base(WebApiResources.ApiVersionOutOfRangeForRoutes(requestedApiVersion, string.Join(", ", routesMatchedExceptVersion))) + { + } + + public VssVersionOutOfRangeException(string message) + : base(message) + { + } + } + + /// + /// Exception thrown when the api-version is not supplied for a particular type of request + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "VssVersionNotSpecifiedException", "GitHub.Services.WebApi.VssVersionNotSpecifiedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VssVersionNotSpecifiedException : VssResourceVersionException + { + public VssVersionNotSpecifiedException(String httpMethod) + : base(WebApiResources.VersionNotSuppliedMessage(httpMethod)) + { + } + } + + /// + /// Exception thrown when the requested version of a resource is a "preview" api, but -preview is not supplied in the request's api-version + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "VssInvalidPreviewVersionException", "GitHub.Services.WebApi.VssInvalidPreviewVersionException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VssInvalidPreviewVersionException : VssResourceVersionException + { + public VssInvalidPreviewVersionException(ApiResourceVersion requestedVersion) + : base(WebApiResources.PreviewVersionNotSuppliedMessage(requestedVersion.ToString())) + { + } + } + + /// + /// Exception thrown when a request body's contentType is not supported by a given controller. + /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "VssRequestContentTypeNotSupportedException", "GitHub.Services.WebApi.VssRequestContentTypeNotSupportedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VssRequestContentTypeNotSupportedException : VssServiceException + { + public VssRequestContentTypeNotSupportedException(String contentType, String httpMethod, IEnumerable validContentTypes) + : base(WebApiResources.RequestContentTypeNotSupported(contentType, httpMethod, String.Join(", ", validContentTypes))) + { + } + } + + /// + /// Exception thrown when a cross-origin request is made using cookie-based authentication from an unsafe domain. 
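// --- Editorial sketch, not part of the patch above ---
// VssVersionNotSpecifiedException, VssInvalidPreviewVersionException and
// VssInvalidApiResourceVersionException all concern the "api-version" value a caller sends,
// conventionally "major.minor" with an optional "-preview[.resourceVersion]" suffix
// (for example "1.0-preview.4"). The parse below is a simplified illustration of that shape,
// not the SDK's own ApiResourceVersion parsing.
using System;

internal static class ApiVersionStringExample
{
    internal static (Version Api, bool IsPreview, int ResourceVersion) Parse(string apiVersion)
    {
        string[] parts = apiVersion.Split('-');
        bool isPreview = false;
        int resourceVersion = 0;

        if (parts.Length == 2 && parts[1].StartsWith("preview", StringComparison.OrdinalIgnoreCase))
        {
            isPreview = true;
            int dot = parts[1].IndexOf('.');
            if (dot >= 0)
            {
                resourceVersion = int.Parse(parts[1].Substring(dot + 1));
            }
        }

        return (Version.Parse(parts[0]), isPreview, resourceVersion);
    }
}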
+ /// + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + public class VssApiUnsafeCrossOriginRequestException : VssServiceException + { + public VssApiUnsafeCrossOriginRequestException(String origin) + : base(WebApiResources.UnsafeCrossOriginRequest(origin)) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/HttpClients/Constants.cs b/src/Sdk/WebApi/WebApi/HttpClients/Constants.cs new file mode 100644 index 00000000000..c93c64057fc --- /dev/null +++ b/src/Sdk/WebApi/WebApi/HttpClients/Constants.cs @@ -0,0 +1,123 @@ +using System; +using System.ComponentModel; + +namespace GitHub.Services +{ + internal static class QueryParameters + { + //Common query parameters + internal const String Properties = "properties"; + + //Account query parameters + internal const String CreatorId = "creatorId"; + internal const String OwnerId = "ownerId"; + internal const String IncludeDisabledAccounts = "includeDisabledAccounts"; + internal const String IncludeOwner = "includeOwner"; + internal const String StatusReason = "statusReason"; + internal const String IncludeDeletedUsers = "includeDeletedUsers"; + internal const String AccountId = "accountId"; + internal const String UsePrecreated = "usePrecreated"; + internal const string UserType = "userType"; + + //Identity query parameters + internal const String SubjectDescriptors = "subjectDescriptors"; + internal const String SocialDescriptors = "socialDescriptors"; + internal const String Descriptors = "descriptors"; + internal const String IdentityIds = "identityIds"; + internal const String SearchFilter = "searchFilter"; + internal const String FilterValue = "filterValue"; + internal const String QueryMembership = "queryMembership"; + internal const String IdentitySequenceId = "identitySequenceId"; + internal const String GroupSequenceId = "groupSequenceId"; + internal const String OrgIdentitySequenceId = "organizationIdentitySequenceId"; + internal const String PageSize = "pageSize"; + internal const String ScopeId = "scopeId"; + internal const String ScopeIds = "scopeIds"; + internal const String Recurse = "recurse"; + internal const String Deleted = "deleted"; + internal const String ScopeName = "scopeName"; + internal const String MemberId = "memberId"; + internal const String IncludeRestrictedVisibility = "includeRestrictedVisibility"; + internal const String ReadAllIdentities = "readAllIdentities"; + internal const String ReadIdentitiesOptions = "options"; + internal const String DomainId = "domainId"; + + //DelegatedAuthorization query parameters + internal const String UserId = "userId"; + internal const String DisplayName = "displayName"; + internal const String ValidTo = "validTo"; + internal const String Scope = "scope"; + internal const String AccessTokenKey = "key"; + internal const String TokenType = "tokenType"; + + //Security query parameters + internal const String AlwaysAllowAdministrators = "alwaysAllowAdministrators"; + internal const String Descriptor = "descriptor"; + internal const String IncludeExtendedInfo = "includeExtendedInfo"; + internal const String LocalOnly = "localonly"; + internal const String Token = "token"; + internal const String Tokens = "tokens"; + internal const String Delimiter = "delimiter"; + + // Security backing store query parameters + internal const String OldSequenceId = "oldSequenceId"; + internal const String InheritFlag = "inheritFlag"; + internal const String UseVsidSubjects = "useVsidSubjects"; + + //Profile query parameters + internal const String 
Size = "size"; + internal const String ModifiedSince = "modifiedsince"; + internal const String ModifiedAfterRevision = "modifiedafterrevision"; + internal const String Partition = "partition"; + internal const String Details = "details"; + internal const String WithCoreAttributes = "withcoreattributes"; + internal const String CoreAttributes = "coreattributes"; + internal const String ProfilePageType = "profilePageType"; + internal const String IpAddress = "ipaddress"; + + //ClinetNotification query parameters + internal const String ClientId = "clientId"; + + //File container query parameters + internal const String ArtifactUris = "artifactUris"; + internal const String ScopeIdentifier = "scope"; + internal const String ItemPath = "itemPath"; + internal const String includeDownloadTickets = "includeDownloadTickets"; + internal const String isShallow = "isShallow"; + + //Telemetry query parameters for Licensing + internal const String TelemetryPrefix = "t-"; + + } + + public static class IdentityMruRestApiConstants + { + public const String Add = "add"; + public const String Remove = "remove"; + public const String Update = "update"; + public const String Me = "me"; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static class ProfileRestApiConstants + { + public const String Me = "me"; + public const String Value = "value"; + } + + public static class UserRestApiConstants + { + public const String Me = "me"; + public const string JsonMergePatchMediaType = "application/merge-patch+json"; + } + + public static class CustomHttpResponseHeaders + { + public const string ActivityId = "ActivityId"; + } + + public static class ExtensionManagementConstants + { + public const string Me = "me"; + } +} diff --git a/src/Sdk/WebApi/WebApi/HttpClients/FileContainerHttpClient.cs b/src/Sdk/WebApi/WebApi/HttpClients/FileContainerHttpClient.cs new file mode 100644 index 00000000000..4caa5ba8c15 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/HttpClients/FileContainerHttpClient.cs @@ -0,0 +1,703 @@ +using System; +using System.Collections.Generic; +using System.Collections.Specialized; +using System.ComponentModel; +using System.Diagnostics; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Services.FileContainer.Client +{ + // until we figure out the TFS location service story for REST apis, we leave the serviceDefinition attribute off the class + //[ServiceDefinition(ServiceInterfaces.FileContainerResource, ServiceIdentifiers.FileContainerResource)] + public class FileContainerHttpClient : VssHttpClientBase + { + public event EventHandler UploadFileReportTrace; + public event EventHandler UploadFileReportProgress; + + static FileContainerHttpClient() + { + s_translatedExceptions = new Dictionary(); + s_translatedExceptions.Add("ArtifactUriNotSupportedException", typeof(ArtifactUriNotSupportedException)); + s_translatedExceptions.Add("ContainerAlreadyExistsException", typeof(ContainerAlreadyExistsException)); + s_translatedExceptions.Add("ContainerItemCopyDuplicateTargetsException", typeof(ContainerItemCopyDuplicateTargetsException)); + s_translatedExceptions.Add("ContainerItemCopySourcePendingUploadException", typeof(ContainerItemCopySourcePendingUploadException)); + s_translatedExceptions.Add("ContainerItemCopyTargetChildOfSourceException", typeof(ContainerItemCopyTargetChildOfSourceException)); + 
s_translatedExceptions.Add("ContainerItemExistsException", typeof(ContainerItemExistsException)); + s_translatedExceptions.Add("ContainerItemNotFoundException", typeof(ContainerItemNotFoundException)); + s_translatedExceptions.Add("ContainerNoContentException", typeof(ContainerNoContentException)); + s_translatedExceptions.Add("ContainerNotFoundException", typeof(ContainerNotFoundException)); + s_translatedExceptions.Add("ContainerUnexpectedContentTypeException", typeof(ContainerUnexpectedContentTypeException)); + s_translatedExceptions.Add("ContainerWriteAccessDeniedException", typeof(ContainerWriteAccessDeniedException)); + s_translatedExceptions.Add("PendingUploadNotFoundException", typeof(PendingUploadNotFoundException)); + + s_currentApiVersion = new ApiResourceVersion(1.0, 4); + } + + public FileContainerHttpClient(Uri baseUrl, VssCredentials credentials) + : base(baseUrl, credentials) + { + } + + public FileContainerHttpClient(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings) + : base(baseUrl, credentials, settings) + { + } + + public FileContainerHttpClient(Uri baseUrl, VssCredentials credentials, params DelegatingHandler[] handlers) + : base(baseUrl, credentials, handlers) + { + } + + public FileContainerHttpClient(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings, params DelegatingHandler[] handlers) + : base(baseUrl, credentials, settings, handlers) + { + } + + public FileContainerHttpClient(Uri baseUrl, HttpMessageHandler pipeline, bool disposeHandler) + : base(baseUrl, pipeline, disposeHandler) + { + } + + /// + /// Queries for file containers + /// + /// List of artifact uris associated with containers. If empty or null will return all containers. + /// + /// + public Task> QueryContainersAsync(List artifactUris, Guid scopeIdentifier, Object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + List> query = AppendContainerQueryString(artifactUris, scopeIdentifier); + return SendAsync>(HttpMethod.Get, FileContainerResourceIds.FileContainer, version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken); + } + + /// + /// Queries for container items in a container. + /// + /// Id of the container to query. + /// Id of the scope to query + /// Path to folder or file. Can be empty or null to query from container root. + /// User state + /// Whether to include download ticket(s) for the container item(s) in the result + /// CancellationToken to cancel the task + /// + public Task> QueryContainerItemsAsync(Int64 containerId, Guid scopeIdentifier, String itemPath = null, Object userState = null, Boolean includeDownloadTickets = false, CancellationToken cancellationToken = default(CancellationToken)) + { + return QueryContainerItemsAsync(containerId, scopeIdentifier, false, itemPath, userState, includeDownloadTickets, cancellationToken); + } + + /// + /// Queries for container items in a container. + /// + /// Id of the container to query. + /// Id of the scope to query + /// Whether to just return immediate children items under the itemPath + /// Path to folder or file. Can be empty or null to query from container root. 
+ /// User state + /// Whether to include download ticket(s) for the container item(s) in the result + /// CancellationToken to cancel the task + public Task> QueryContainerItemsAsync(Int64 containerId, Guid scopeIdentifier, Boolean isShallow, String itemPath = null, Object userState = null, Boolean includeDownloadTickets = false, CancellationToken cancellationToken = default(CancellationToken)) + { + if (containerId < 1) + { + throw new ArgumentException(WebApiResources.ContainerIdMustBeGreaterThanZero(), "containerId"); + } + + List> query = AppendItemQueryString(itemPath, scopeIdentifier, includeDownloadTickets, isShallow); + return SendAsync>(HttpMethod.Get, FileContainerResourceIds.FileContainer, routeValues: new { containerId = containerId }, version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken); + } + + /// + /// Uploads a file in chunks to the specified uri. + /// + /// Stream to upload. + /// CancellationToken to cancel the task + /// Http response message. + public async Task UploadFileAsync( + Int64 containerId, + String itemPath, + Stream fileStream, + Guid scopeIdentifier, + CancellationToken cancellationToken = default(CancellationToken), + int chunkSize = c_defaultChunkSize, + bool uploadFirstChunk = false, + Object userState = null, + Boolean compressStream = true) + { + if (containerId < 1) + { + throw new ArgumentException(WebApiResources.ContainerIdMustBeGreaterThanZero(), "containerId"); + } + + ArgumentUtility.CheckForNull(fileStream, "fileStream"); + + if (fileStream.Length == 0) + { + HttpRequestMessage requestMessage; + List> query = AppendItemQueryString(itemPath, scopeIdentifier); + + // zero byte upload + requestMessage = await CreateRequestMessageAsync(HttpMethod.Put, FileContainerResourceIds.FileContainer, routeValues: new { containerId = containerId }, version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + return await SendAsync(requestMessage, userState, cancellationToken).ConfigureAwait(false); + } + + ApiResourceVersion gzipSupportedVersion = new ApiResourceVersion(new Version(1, 0), 2); + ApiResourceVersion requestVersion = await NegotiateRequestVersionAsync(FileContainerResourceIds.FileContainer, s_currentApiVersion, userState, cancellationToken: cancellationToken).ConfigureAwait(false); + + if (compressStream + && (requestVersion.ApiVersion < gzipSupportedVersion.ApiVersion + || (requestVersion.ApiVersion == gzipSupportedVersion.ApiVersion && requestVersion.ResourceVersion < gzipSupportedVersion.ResourceVersion))) + { + compressStream = false; + } + + Stream streamToUpload = fileStream; + Boolean gzipped = false; + long filelength = fileStream.Length; + + try + { + if (compressStream) + { + if (filelength > 65535) // if file greater than 64K use a file + { + String tempFile = Path.GetTempFileName(); + streamToUpload = File.Create(tempFile, 32768, FileOptions.DeleteOnClose | FileOptions.Asynchronous); + } + else + { + streamToUpload = new MemoryStream((int)filelength + 8); + } + + using (GZipStream zippedStream = new GZipStream(streamToUpload, CompressionMode.Compress, true)) + { + await fileStream.CopyToAsync(zippedStream).ConfigureAwait(false); + } + + if (streamToUpload.Length >= filelength) + { + // compression did not help + streamToUpload.Dispose(); + streamToUpload = fileStream; + } + else + { + gzipped = true; + } + + streamToUpload.Seek(0, SeekOrigin.Begin); + } + + return await 
UploadFileAsync(containerId, itemPath, streamToUpload, null, filelength, gzipped, scopeIdentifier, cancellationToken, chunkSize, uploadFirstChunk: uploadFirstChunk, userState: userState); + } + finally + { + if (gzipped && streamToUpload != null) + { + streamToUpload.Dispose(); + } + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public async Task UploadFileAsync( + Int64 containerId, + String itemPath, + Stream fileStream, + byte[] contentId, + Int64 fileLength, + Boolean isGzipped, + Guid scopeIdentifier, + CancellationToken cancellationToken = default(CancellationToken), + int chunkSize = c_defaultChunkSize, + int chunkRetryTimes = c_defaultChunkRetryTimes, + bool uploadFirstChunk = false, + Object userState = null) + { + if (containerId < 1) + { + throw new ArgumentException(WebApiResources.ContainerIdMustBeGreaterThanZero(), "containerId"); + } + + if (chunkSize > c_maxChunkSize) + { + chunkSize = c_maxChunkSize; + } + + // if a contentId is specified but the chunk size is not a 2mb multiple error + if (contentId != null && (chunkSize % c_ContentChunkMultiple) != 0) + { + throw new ArgumentException(FileContainerResources.ChunksizeWrongWithContentId(c_ContentChunkMultiple), "chunkSize"); + } + + ArgumentUtility.CheckForNull(fileStream, "fileStream"); + + ApiResourceVersion gzipSupportedVersion = new ApiResourceVersion(new Version(1, 0), 2); + ApiResourceVersion requestVersion = await NegotiateRequestVersionAsync(FileContainerResourceIds.FileContainer, s_currentApiVersion, userState, cancellationToken).ConfigureAwait(false); + + if (isGzipped + && (requestVersion.ApiVersion < gzipSupportedVersion.ApiVersion + || (requestVersion.ApiVersion == gzipSupportedVersion.ApiVersion && requestVersion.ResourceVersion < gzipSupportedVersion.ResourceVersion))) + { + throw new ArgumentException(FileContainerResources.GzipNotSupportedOnServer(), "isGzipped"); + } + + if (isGzipped && fileStream.Length >= fileLength) + { + throw new ArgumentException(FileContainerResources.BadCompression(), "fileLength"); + } + + HttpRequestMessage requestMessage = null; + List> query = AppendItemQueryString(itemPath, scopeIdentifier); + + if (fileStream.Length == 0) + { + // zero byte upload + FileUploadTrace(itemPath, $"Upload zero byte file '{itemPath}'."); + requestMessage = await CreateRequestMessageAsync(HttpMethod.Put, FileContainerResourceIds.FileContainer, routeValues: new { containerId = containerId }, version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + return await SendAsync(requestMessage, userState, cancellationToken).ConfigureAwait(false); + } + + bool multiChunk = false; + int totalChunks = 1; + if (fileStream.Length > chunkSize) + { + totalChunks = (int)Math.Ceiling(fileStream.Length / (double)chunkSize); + FileUploadTrace(itemPath, $"Begin chunking upload file '{itemPath}', chunk size '{chunkSize} Bytes', total chunks '{totalChunks}'."); + multiChunk = true; + } + else + { + FileUploadTrace(itemPath, $"File '{itemPath}' will be uploaded in one chunk."); + chunkSize = (int)fileStream.Length; + } + + StreamParser streamParser = new StreamParser(fileStream, chunkSize); + SubStream currentStream = streamParser.GetNextStream(); + HttpResponseMessage response = null; + + Byte[] dataToSend = new Byte[chunkSize]; + int currentChunk = 0; + Stopwatch uploadTimer = new Stopwatch(); + while (currentStream.Length > 0 && !cancellationToken.IsCancellationRequested) + { + currentChunk++; + + for (int attempt = 1; attempt 
<= chunkRetryTimes && !cancellationToken.IsCancellationRequested; attempt++) + { + if (attempt > 1) + { + TimeSpan backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(5), TimeSpan.FromSeconds(10)); + FileUploadTrace(itemPath, $"Backoff {backoff.TotalSeconds} seconds before attempt '{attempt}' chunk '{currentChunk}' of file '{itemPath}'."); + await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); + currentStream.Seek(0, SeekOrigin.Begin); + } + + FileUploadTrace(itemPath, $"Attempt '{attempt}' for uploading chunk '{currentChunk}' of file '{itemPath}'."); + + // inorder for the upload to be retryable, we need the content to be re-readable + // to ensure this we copy the chunk into a byte array and send that + // chunk size ensures we can convert the length to an int + int bytesToCopy = (int)currentStream.Length; + using (MemoryStream ms = new MemoryStream(dataToSend)) + { + await currentStream.CopyToAsync(ms, bytesToCopy, cancellationToken).ConfigureAwait(false); + } + + // set the content and the Content-Range header + HttpContent byteArrayContent = new ByteArrayContent(dataToSend, 0, bytesToCopy); + byteArrayContent.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream"); + byteArrayContent.Headers.ContentLength = currentStream.Length; + byteArrayContent.Headers.ContentRange = new System.Net.Http.Headers.ContentRangeHeaderValue(currentStream.StartingPostionOnOuterStream, + currentStream.EndingPostionOnOuterStream, + streamParser.Length); + FileUploadTrace(itemPath, $"Generate new HttpRequest for uploading file '{itemPath}', chunk '{currentChunk}' of '{totalChunks}'."); + + try + { + if (requestMessage != null) + { + requestMessage.Dispose(); + requestMessage = null; + } + + requestMessage = await CreateRequestMessageAsync( + HttpMethod.Put, + FileContainerResourceIds.FileContainer, + routeValues: new { containerId = containerId }, + version: s_currentApiVersion, + content: byteArrayContent, + queryParameters: query, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + // stop re-try on cancellation. + throw; + } + catch (Exception ex) when (attempt < chunkRetryTimes) // not the last attempt + { + FileUploadTrace(itemPath, $"Chunk '{currentChunk}' attempt '{attempt}' of file '{itemPath}' fail to create HttpRequest. Error: {ex.ToString()}."); + continue; + } + + if (isGzipped) + { + //add gzip header info + byteArrayContent.Headers.ContentEncoding.Add("gzip"); + byteArrayContent.Headers.Add("x-tfs-filelength", fileLength.ToString(System.Globalization.CultureInfo.InvariantCulture)); + } + + if (contentId != null) + { + byteArrayContent.Headers.Add("x-vso-contentId", Convert.ToBase64String(contentId)); // Base64FormattingOptions.None is default when not supplied + } + + FileUploadTrace(itemPath, $"Start uploading file '{itemPath}' to server, chunk '{currentChunk}'."); + uploadTimer.Restart(); + + try + { + if (response != null) + { + response.Dispose(); + response = null; + } + + response = await SendAsync(requestMessage, userState, cancellationToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + // stop re-try on cancellation. 
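// --- Editorial sketch, not part of the patch above ---
// The loop above retries each chunk up to chunkRetryTimes, waiting a random 5-10 second backoff
// (BackoffTimerHelper.GetRandomBackoff) before every attempt after the first, and never retries
// once cancellation has been requested. A condensed stand-alone version of that retry shape; the
// sendChunkAsync delegate is hypothetical.
using System;
using System.Threading;
using System.Threading.Tasks;

internal static class ChunkRetryExample
{
    internal static async Task<bool> SendWithRetryAsync(
        Func<CancellationToken, Task<bool>> sendChunkAsync,   // hypothetical sender; true = chunk accepted
        int retryTimes,
        CancellationToken cancellationToken)
    {
        Random random = new Random();

        for (int attempt = 1; attempt <= retryTimes; attempt++)
        {
            if (attempt > 1)
            {
                // Random backoff between 5 and 10 seconds, mirroring GetRandomBackoff above.
                TimeSpan backoff = TimeSpan.FromSeconds(5 + (5 * random.NextDouble()));
                await Task.Delay(backoff, cancellationToken).ConfigureAwait(false);
            }

            try
            {
                if (await sendChunkAsync(cancellationToken).ConfigureAwait(false))
                {
                    return true;
                }
            }
            catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
            {
                throw; // cancellation is never retried
            }
            catch (Exception) when (attempt < retryTimes)
            {
                // Swallow and retry; the final attempt lets the failure propagate.
            }
        }

        return false;
    }
}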
+ throw; + } + catch (Exception ex) when (attempt < chunkRetryTimes) // not the last attempt + { + FileUploadTrace(itemPath, $"Chunk '{currentChunk}' attempt '{attempt}' of file '{itemPath}' fail to send request to server. Error: {ex.ToString()}."); + continue; + } + + uploadTimer.Stop(); + FileUploadTrace(itemPath, $"Finished upload chunk '{currentChunk}' of file '{itemPath}', elapsed {uploadTimer.ElapsedMilliseconds} (ms), response code '{response.StatusCode}'."); + + if (multiChunk) + { + FileUploadProgress(itemPath, currentChunk, (int)Math.Ceiling(fileStream.Length / (double)chunkSize)); + } + + if (response.IsSuccessStatusCode) + { + break; + } + else + { + FileUploadTrace(itemPath, $"Chunk '{currentChunk}' attempt '{attempt}' of file '{itemPath}' received non-success status code {response.StatusCode} for sending request."); + continue; + } + } + + // if we don't have success then bail and return the failed response + if (!response.IsSuccessStatusCode) + { + break; + } + + if (contentId != null && response.StatusCode == HttpStatusCode.Created) + { + // no need to keep uploading since the server said it has all the content + FileUploadTrace(itemPath, $"Stop chunking upload the rest of the file '{itemPath}', since server already has all the content."); + break; + } + + currentStream = streamParser.GetNextStream(); + if (uploadFirstChunk) + { + break; + } + } + + cancellationToken.ThrowIfCancellationRequested(); + + return response; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public async Task> CreateItemsAsync( + Int64 containerId, + List items, + Guid scopeIdentifier, + CancellationToken cancellationToken = default(CancellationToken), + Object userState = null) + { + List updatedItems = items.Select(x => { x.ContainerId = containerId; x.Status = ContainerItemStatus.PendingUpload; return x; }).ToList(); + + try + { + return await PostAsync, List>( + updatedItems, + FileContainerResourceIds.FileContainer, + routeValues: new { containerId = containerId, scopeIdentifier = scopeIdentifier }, + version: s_currentApiVersion, + userState: userState, + cancellationToken: cancellationToken); + } + catch (Exception) + { + //eat until versioning works in options request + return updatedItems; + } + } + + // for back compat with internal use + [EditorBrowsable(EditorBrowsableState.Never)] + public Task UploadFileToTfsAsync( + Int64 containerId, + String itemPath, + Stream fileStream, + Guid scopeIdentifier, + CancellationToken cancellationToken, + int chunkSize = c_defaultChunkSize, + bool uploadFirstChunk = false, + Object userState = null) + { + return UploadFileAsync(containerId, itemPath, fileStream, scopeIdentifier, cancellationToken, chunkSize, uploadFirstChunk, userState); + } + + /// + /// Download a file from the specified container. + /// + /// + /// + /// + /// + /// A stream of the file content. + public Task DownloadFileAsync( + Int64 containerId, + String itemPath, + CancellationToken cancellationToken, + Guid scopeIdentifier, + Object userState = null) + { + return DownloadAsync(containerId, itemPath, "application/octet-stream", cancellationToken, scopeIdentifier, userState); + } + + /// + /// Download a file or folder as a zip file. + /// + /// + /// + /// + /// + /// A stream of the zip file. 
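// --- Editorial sketch, not part of the patch above ---
// Saving a container file to disk with the download API above. The Task<Stream> result type is an
// assumption (the generic argument is not spelled out above); note that the client already unwraps
// gzip-encoded responses, so the stream received here is plain content. The container id, scope id
// and paths are placeholders.
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Services.FileContainer.Client;

internal static class FileContainerDownloadExample
{
    internal static async Task SaveFileAsync(FileContainerHttpClient client, CancellationToken cancellationToken)
    {
        using (Stream content = await client.DownloadFileAsync(
            containerId: 42,                       // placeholder container id
            itemPath: "artifacts/drop/output.zip",
            cancellationToken: cancellationToken,
            scopeIdentifier: Guid.NewGuid()))      // placeholder project/scope id
        using (FileStream target = File.Create("output.zip"))
        {
            await content.CopyToAsync(target, 81920, cancellationToken).ConfigureAwait(false);
        }
    }
}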
+ public Task DownloadItemAsZipAsync( + Int64 containerId, + String itemPath, + CancellationToken cancellationToken, + Guid scopeIdentifier, + Object userState = null) + { + return DownloadAsync(containerId, itemPath, "application/zip", cancellationToken, scopeIdentifier, userState); + } + + /// + /// Delete a container item + /// + /// + /// + /// + /// + /// + public Task DeleteContainerItem( + Int64 containerId, + String itemPath, + Guid scopeIdentifier, + CancellationToken cancellationToken = default(CancellationToken), + Object userState = null) + { + if (containerId < 1) + { + throw new ArgumentException(WebApiResources.ContainerIdMustBeGreaterThanZero(), "containerId"); + } + + List> query = AppendItemQueryString(itemPath, scopeIdentifier); + + return DeleteAsync( + FileContainerResourceIds.FileContainer, + new { containerId = containerId }, + s_currentApiVersion, + query, + userState, + cancellationToken); + } + + private async Task ContainerGetRequestAsync( + Int64 containerId, + String itemPath, + String contentType, + CancellationToken cancellationToken, + Guid scopeIdentifier, + Object userState = null) + { + if (containerId < 1) + { + throw new ArgumentException(WebApiResources.ContainerIdMustBeGreaterThanZero(), "containerId"); + } + + List> query = AppendItemQueryString(itemPath, scopeIdentifier); + HttpRequestMessage requestMessage = await CreateRequestMessageAsync(HttpMethod.Get, FileContainerResourceIds.FileContainer, routeValues: new { containerId = containerId }, version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + + if (!String.IsNullOrEmpty(contentType)) + { + requestMessage.Headers.Accept.Clear(); + var header = new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue(contentType); + header.Parameters.Add(new System.Net.Http.Headers.NameValueHeaderValue(ApiResourceVersionExtensions.c_apiVersionHeaderKey, "1.0")); + header.Parameters.Add(new System.Net.Http.Headers.NameValueHeaderValue(ApiResourceVersionExtensions.c_legacyResourceVersionHeaderKey, "1")); + requestMessage.Headers.Accept.Add(header); + } + + return await SendAsync(requestMessage, HttpCompletionOption.ResponseHeadersRead, userState, cancellationToken).ConfigureAwait(false); + } + + private List> AppendContainerQueryString(List artifactUris, Guid scopeIdentifier) + { + List> collection = new List>(); + + if (artifactUris != null && artifactUris.Count > 0) + { + String artifactsString = String.Join(",", artifactUris.Select(x => x.AbsoluteUri)); + collection.Add(QueryParameters.ArtifactUris, artifactsString); + } + + collection.Add(QueryParameters.ScopeIdentifier, scopeIdentifier.ToString()); + + return collection; + } + + private List> AppendItemQueryString(String itemPath, Guid scopeIdentifier, Boolean includeDownloadTickets = false, Boolean isShallow = false) + { + List> collection = new List>(); + + if (!String.IsNullOrEmpty(itemPath)) + { + itemPath = FileContainerItem.EnsurePathFormat(itemPath); + collection.Add(QueryParameters.ItemPath, itemPath); + } + + if (includeDownloadTickets) + { + collection.Add(QueryParameters.includeDownloadTickets, "true"); + } + + if (isShallow) + { + collection.Add(QueryParameters.isShallow, "true"); + } + + collection.Add(QueryParameters.ScopeIdentifier, scopeIdentifier.ToString()); + + return collection; + } + + private async Task DownloadAsync( + Int64 containerId, + String itemPath, + String contentType, + CancellationToken cancellationToken, + Guid scopeIdentifier, + Object 
userState = null) + { + HttpResponseMessage response = await ContainerGetRequestAsync(containerId, itemPath, contentType, cancellationToken, scopeIdentifier, userState).ConfigureAwait(false); + + response.EnsureSuccessStatusCode(); + + if (response.StatusCode == HttpStatusCode.NoContent) + { + throw new ContainerNoContentException(); + } + + if (VssStringComparer.ContentType.Equals(response.Content.Headers.ContentType.MediaType, contentType)) + { + if (response.Content.Headers.ContentEncoding.Contains("gzip", StringComparer.OrdinalIgnoreCase)) + { + return new GZipStream(await response.Content.ReadAsStreamAsync().ConfigureAwait(false), CompressionMode.Decompress); + } + else + { + return await response.Content.ReadAsStreamAsync().ConfigureAwait(false); + } + } + else + { + throw new ContainerUnexpectedContentTypeException(contentType, response.Content.Headers.ContentType.MediaType); + } + } + + private void FileUploadTrace(string file, string message) + { + if (UploadFileReportTrace != null) + { + UploadFileReportTrace(this, new ReportTraceEventArgs(file, message)); + } + } + + private void FileUploadProgress(string file, int currentChunk, int totalChunks) + { + if (UploadFileReportProgress != null) + { + UploadFileReportProgress(this, new ReportProgressEventArgs(file, currentChunk, totalChunks)); + } + } + + /// + /// Exceptions for file container errors + /// + protected override IDictionary TranslatedExceptions + { + get + { + return s_translatedExceptions; + } + } + + private const int c_defaultChunkSize = 8 * 1024 * 1024; + private const int c_defaultChunkRetryTimes = 3; + private const int c_maxChunkSize = 24 * 1024 * 1024; + private const int c_ContentChunkMultiple = 2 * 1024 * 1024; + private static Dictionary s_translatedExceptions; + private static readonly ApiResourceVersion s_currentApiVersion; + } + + public class ReportTraceEventArgs : EventArgs + { + public ReportTraceEventArgs(String file, String message) + { + File = file; + Message = message; + } + + public String File { get; private set; } + public String Message { get; private set; } + } + + public class ReportProgressEventArgs : EventArgs + { + public ReportProgressEventArgs(String file, int currentChunk, int totalChunks) + { + File = file; + CurrentChunk = currentChunk; + TotalChunks = totalChunks; + } + + public String File { get; private set; } + public int CurrentChunk { get; private set; } + public int TotalChunks { get; private set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/HttpClients/IdentityHttpClient.cs b/src/Sdk/WebApi/WebApi/HttpClients/IdentityHttpClient.cs new file mode 100644 index 00000000000..e63aea129c6 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/HttpClients/IdentityHttpClient.cs @@ -0,0 +1,1167 @@ +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.DelegatedAuthorization; +using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Exceptions; +using GitHub.Services.Common.Internal; +using System.Linq; +using GitHub.Services.WebApi.Patch.Json; + +namespace GitHub.Services.Identity.Client +{ + [ResourceArea(IdentityResourceIds.AreaId)] + [ClientCircuitBreakerSettings(timeoutSeconds: 100, failurePercentage:80, MaxConcurrentRequests = 110)] + public class IdentityHttpClient : VssHttpClientBase + { + static IdentityHttpClient() + { + s_translatedExceptions = new Dictionary(); + + // 400 (Bad Request) + s_translatedExceptions.Add("IdentityDomainMismatchException", 
typeof(IdentityDomainMismatchException)); + s_translatedExceptions.Add("AddMemberCyclicMembershipException", typeof(AddMemberCyclicMembershipException)); + s_translatedExceptions.Add("IdentityPropertyRequiredException", typeof(IdentityPropertyRequiredException)); + s_translatedExceptions.Add("IdentityExpressionException", typeof(IdentityExpressionException)); + s_translatedExceptions.Add("InvalidDisplayNameException", typeof(InvalidDisplayNameException)); + s_translatedExceptions.Add("GroupNameNotRecognizedException", typeof(GroupNameNotRecognizedException)); + s_translatedExceptions.Add("IdentityMapReadOnlyException", typeof(IdentityMapReadOnlyException)); + s_translatedExceptions.Add("IdentityNotServiceIdentityException", typeof(IdentityNotServiceIdentityException)); + s_translatedExceptions.Add("InvalidServiceIdentityNameException", typeof(InvalidServiceIdentityNameException)); + s_translatedExceptions.Add("IllegalIdentityException", typeof(IllegalIdentityException)); + s_translatedExceptions.Add("MissingRequiredParameterException", typeof(MissingRequiredParameterException)); + s_translatedExceptions.Add("IncompatibleScopeException", typeof(IncompatibleScopeException)); + + // 403 (Forbidden) + s_translatedExceptions.Add("RemoveAccountOwnerFromAdminGroupException", typeof(RemoveAccountOwnerFromAdminGroupException)); + s_translatedExceptions.Add("RemoveSelfFromAdminGroupException", typeof(RemoveSelfFromAdminGroupException)); + s_translatedExceptions.Add("AddGroupMemberIllegalMemberException", typeof(AddGroupMemberIllegalMemberException)); + s_translatedExceptions.Add("AddGroupMemberIllegalWindowsIdentityException", typeof(AddGroupMemberIllegalWindowsIdentityException)); + s_translatedExceptions.Add("AddGroupMemberIllegalInternetIdentityException", typeof(AddGroupMemberIllegalInternetIdentityException)); + s_translatedExceptions.Add("RemoveSpecialGroupException", typeof(RemoveSpecialGroupException)); + s_translatedExceptions.Add("NotApplicationGroupException", typeof(NotApplicationGroupException)); + s_translatedExceptions.Add("ModifyEveryoneGroupException", typeof(ModifyEveryoneGroupException)); + s_translatedExceptions.Add("NotASecurityGroupException", typeof(NotASecurityGroupException)); + s_translatedExceptions.Add("RemoveMemberServiceAccountException", typeof(RemoveMemberServiceAccountException)); + s_translatedExceptions.Add("AccountPreferencesAlreadyExistException", typeof(AccountPreferencesAlreadyExistException)); + + // 404 (NotFound) + s_translatedExceptions.Add("RemoveGroupMemberNotMemberException", typeof(RemoveGroupMemberNotMemberException)); + s_translatedExceptions.Add("RemoveNonexistentGroupException", typeof(RemoveNonexistentGroupException)); + s_translatedExceptions.Add("FindGroupSidDoesNotExistException", typeof(FindGroupSidDoesNotExistException)); + s_translatedExceptions.Add("GroupScopeDoesNotExistException", typeof(GroupScopeDoesNotExistException)); + s_translatedExceptions.Add("IdentityNotFoundException", typeof(IdentityNotFoundException)); + + // 409 (Conflict) + s_translatedExceptions.Add("GroupCreationException", typeof(GroupCreationException)); + s_translatedExceptions.Add("GroupScopeCreationException", typeof(GroupScopeCreationException)); + s_translatedExceptions.Add("AddMemberIdentityAlreadyMemberException", typeof(AddMemberIdentityAlreadyMemberException)); + s_translatedExceptions.Add("GroupRenameException", typeof(GroupRenameException)); + s_translatedExceptions.Add("IdentityAlreadyExistsException", typeof(IdentityAlreadyExistsException)); + 
s_translatedExceptions.Add("IdentityAccountNameAlreadyInUseException", typeof(IdentityAccountNameAlreadyInUseException)); + s_translatedExceptions.Add("IdentityAliasAlreadyInUseException", typeof(IdentityAliasAlreadyInUseException)); + s_translatedExceptions.Add("AddProjectGroupProjectMismatchException", typeof(AddProjectGroupProjectMismatchException)); + + // 500 (InternalServerError) + s_translatedExceptions.Add("IdentitySyncException", typeof(IdentitySyncException)); + + // 503 (ServiceUnavailable) + s_translatedExceptions.Add("IdentityProviderUnavailableException", typeof(IdentityProviderUnavailableException)); + + s_currentApiVersion = new ApiResourceVersion(1.0); + } + + public IdentityHttpClient(Uri baseUrl, VssCredentials credentials) + : base(baseUrl, credentials) + { + } + + public IdentityHttpClient(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings) + : base(baseUrl, credentials, settings) + { + } + + public IdentityHttpClient(Uri baseUrl, VssCredentials credentials, params DelegatingHandler[] handlers) + : base(baseUrl, credentials, handlers) + { + } + + public IdentityHttpClient(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings, params DelegatingHandler[] handlers) + : base(baseUrl, credentials, settings, handlers) + { + } + + public IdentityHttpClient(Uri baseUrl, HttpMessageHandler pipeline, bool disposeHandler) + : base(baseUrl, pipeline, disposeHandler) + { + } + + #region Operations on Identities Controller + #region ReadIdentities overloads + /// + /// Reads all identities + /// + /// Instructs the server whether to query for membership information. + /// Instructs the server which extended properties to query for. + /// Additional client state passed by caller. + /// A Task which when complete, contains the list of identities. + public Task ReadIdentitiesAsync( + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + bool includeRestrictedVisibility = false, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + var queryParams = new List>(); + + return ReadIdentitiesAsyncInternal(queryParams, queryMembership, propertyNameFilters, includeRestrictedVisibility, requestHeadersContext: null, userState: userState, cancellationToken: cancellationToken); + } + + /// + /// Returns identities matching the passed in descriptors + /// + /// List of IdentityDescriptors to query for. + /// Instructs the server whether to query for membership information. + /// Instructs the server which extended properties to query for. + /// Additional client state passed by caller. + /// A Task which when complete, contains the list of identities. + public virtual Task ReadIdentitiesAsync( + IList descriptors, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + bool includeRestrictedVisibility = false, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + return ReadIdentitiesAsync(descriptors, requestHeadersContext: null, queryMembership: queryMembership, + propertyNameFilters: propertyNameFilters, includeRestrictedVisibility: includeRestrictedVisibility, + userState: userState, cancellationToken: cancellationToken); + } + + /// + /// Returns identities matching the passed in descriptors + /// + /// List of IdentityDescriptors to query for. + /// Instructs the server whether to query for membership information. 
+ /// Instructs the server which extended properties to query for. + /// Additional client state passed by caller. + /// A Task which when complete, contains the list of identities. + public virtual Task ReadIdentitiesAsync( + IList descriptors, + RequestHeadersContext requestHeadersContext, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + bool includeRestrictedVisibility = false, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(descriptors, "descriptors"); + + if (descriptors.Count > maxDescriptors) + { + return ReadIdentitiesBatchAsyncInternal( + descriptors, + queryMembership, + propertyNameFilters, + includeRestrictedVisibility, + requestHeadersContext, + userState, cancellationToken); + } + else + { + var pages = new List>(); + + pages.AddMultiple(QueryParameters.Descriptors, descriptors, SerializeDescriptor); + + return ReadIdentitiesAsyncInternal(pages, queryMembership, propertyNameFilters, includeRestrictedVisibility, requestHeadersContext, userState, cancellationToken); + } + } + + /// + /// Returns identities matching the passed in subject descriptors + /// + /// List of SocialDescriptors to query for. + /// Instructs the server whether to query for membership information. + /// Instructs the server which extended properties to query for. + /// Additional client state passed by caller. + /// A Task which when complete, contains the list of identities. + public virtual Task ReadIdentitiesAsync( + IList socialDescriptors, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + bool includeRestrictedVisibility = false, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + return ReadIdentitiesAsync(socialDescriptors, requestHeadersContext: null, queryMembership: queryMembership, + propertyNameFilters: propertyNameFilters, includeRestrictedVisibility: includeRestrictedVisibility, + userState: userState, cancellationToken: cancellationToken); + } + + /// + /// Returns identities matching the passed in descriptors + /// + /// List of SubjectDescriptors to query for. + /// Instructs the server whether to query for membership information. + /// Instructs the server which extended properties to query for. + /// Additional client state passed by caller. + /// A Task which when complete, contains the list of identities. 
+ internal virtual Task ReadIdentitiesAsync( + IList socialDescriptors, + RequestHeadersContext requestHeadersContext, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + bool includeRestrictedVisibility = false, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(socialDescriptors, nameof(socialDescriptors)); + + if (socialDescriptors.Count > maxDescriptors) + { + return ReadIdentitiesBatchAsyncInternal( + socialDescriptors, + queryMembership, + propertyNameFilters, + includeRestrictedVisibility, + requestHeadersContext, + userState, cancellationToken); + } + else + { + var pages = new List>(); + + pages.AddMultiple(QueryParameters.SocialDescriptors, socialDescriptors.Select(descriptor => descriptor.ToString()).ToList()); + + return ReadIdentitiesAsyncInternal(pages, queryMembership, propertyNameFilters, includeRestrictedVisibility, requestHeadersContext, userState, cancellationToken); + } + } + + /// + /// Returns identities matching the passed in subject descriptors + /// + /// List of SubjectDescriptors to query for. + /// Instructs the server whether to query for membership information. + /// Instructs the server which extended properties to query for. + /// Additional client state passed by caller. + /// A Task which when complete, contains the list of identities. + public virtual Task ReadIdentitiesAsync( + IList subjectDescriptors, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + bool includeRestrictedVisibility = false, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + return ReadIdentitiesAsync(subjectDescriptors, requestHeadersContext: null, queryMembership: queryMembership, + propertyNameFilters: propertyNameFilters, includeRestrictedVisibility: includeRestrictedVisibility, + userState: userState, cancellationToken: cancellationToken); + } + + /// + /// Returns identities matching the passed in descriptors + /// + /// List of SubjectDescriptors to query for. + /// Instructs the server whether to query for membership information. + /// Instructs the server which extended properties to query for. + /// Additional client state passed by caller. + /// A Task which when complete, contains the list of identities. 
+ internal virtual Task ReadIdentitiesAsync( + IList subjectDescriptors, + RequestHeadersContext requestHeadersContext, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + bool includeRestrictedVisibility = false, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(subjectDescriptors, nameof(subjectDescriptors)); + + if (subjectDescriptors.Count > maxDescriptors) + { + return ReadIdentitiesBatchAsyncInternal( + subjectDescriptors, + queryMembership, + propertyNameFilters, + includeRestrictedVisibility, + requestHeadersContext, + userState, cancellationToken); + } + else + { + var pages = new List>(); + + pages.AddMultiple(QueryParameters.SubjectDescriptors, subjectDescriptors.Select(descriptor => descriptor.ToString()).ToList()); + + return ReadIdentitiesAsyncInternal(pages, queryMembership, propertyNameFilters, includeRestrictedVisibility, requestHeadersContext, userState, cancellationToken); + } + } + + /// + /// Returns identities matching the passed in identifiers + /// + /// Guids representing unique identifiers for the identities + /// Instructs the server whether to query for membership information. + /// Instructs the server which extended properties to query for. + /// Additional client state passed by caller. + /// A Task which when complete, contains the list of identities. + public virtual Task ReadIdentitiesAsync( + IList identityIds, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + bool includeRestrictedVisibility = false, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + return ReadIdentitiesAsync(identityIds, requestHeadersContext: null, queryMembership: queryMembership, + propertyNameFilters: propertyNameFilters, includeRestrictedVisibility: includeRestrictedVisibility, + userState: userState, cancellationToken: cancellationToken); + } + + /// + /// Returns identities matching the passed in identifiers + /// + /// Guids representing unique identifiers for the identities + /// Instructs the server whether to query for membership information. + /// Instructs the server which extended properties to query for. + /// Additional client state passed by caller. + /// A Task which when complete, contains the list of identities. 
+ internal virtual Task ReadIdentitiesAsync( + IList identityIds, + RequestHeadersContext requestHeadersContext, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + bool includeRestrictedVisibility = false, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(identityIds, "identityIds"); + + if (identityIds.Count > maxIds) + { + return ReadIdentitiesBatchAsyncInternal( + identityIds, + queryMembership, + propertyNameFilters, + includeRestrictedVisibility, + userState, + requestHeadersContext, + cancellationToken); + } + else + { + var pages = new List>(); + + pages.AddMultiple(QueryParameters.IdentityIds, identityIds, (id) => id.ToString("N")); + + return ReadIdentitiesAsyncInternal(pages, queryMembership, propertyNameFilters, includeRestrictedVisibility, requestHeadersContext, userState, cancellationToken); + } + } + + public Task ReadIdentitiesAsync( + IdentitySearchFilter searchFilter, + string filterValue, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + return ReadIdentitiesAsync(searchFilter, filterValue, ReadIdentitiesOptions.None, queryMembership, propertyNameFilters, userState, cancellationToken); + } + + /// + /// Returns identities matching the requested search factor and value + /// + /// + /// + /// Instructs the server whether to query for membership information. + /// Instructs the server which extended properties to query for. + /// Additional client state passed by caller. + /// A Task which when complete, contains the list of identities. + public virtual Task ReadIdentitiesAsync( + IdentitySearchFilter searchFilter, + string filterValue, + ReadIdentitiesOptions options, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckStringForNullOrEmpty(filterValue, "filterValue"); + + List> searchQuery = new List>(); + + searchQuery.Add(QueryParameters.SearchFilter, searchFilter.ToString()); + searchQuery.Add(QueryParameters.FilterValue, filterValue); + searchQuery.Add(QueryParameters.ReadIdentitiesOptions, options.ToString()); + + return ReadIdentitiesAsyncInternal(searchQuery, queryMembership, propertyNameFilters, includeRestrictedVisibility: false, requestHeadersContext: null, userState: userState, cancellationToken: cancellationToken); + } + + public virtual Task ReadIdentitiesAsync( + Guid scopeId, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + var query = new List>(); + query.Add(QueryParameters.ScopeId, scopeId.ToString("N")); + + return ReadIdentitiesAsyncInternal(query, queryMembership, propertyNameFilters, includeRestrictedVisibility: false, requestHeadersContext: null, userState: userState, cancellationToken: cancellationToken); + } + #endregion + + #region ReadIdentity overloads + public Task ReadIdentityAsync( + string identityPuid, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + 
ArgumentUtility.CheckStringForNullOrEmpty(identityPuid, "identityPuid"); + + return ReadIdentityAsyncInternal( + identityPuid, + queryMembership, + propertyNameFilters, + userState, cancellationToken); + } + + public Task ReadIdentityAsync( + Guid identityId, + QueryMembership queryMembership = QueryMembership.None, + IEnumerable propertyNameFilters = null, + object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckForEmptyGuid(identityId, "identityId"); + + return ReadIdentityAsyncInternal( + identityId.ToString("D"), + queryMembership, + propertyNameFilters, + userState, cancellationToken); + } + #endregion + + public async Task> UpdateIdentitiesAsync(IList identities, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "UpdateIdentities")) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(identities, "identities"); + + IdentitiesCollection collection = new IdentitiesCollection(identities); + HttpContent content = new ObjectContent>(new VssJsonCollectionWrapper(collection), base.Formatter); + + return await SendAsync>(HttpMethod.Put, IdentityResourceIds.Identity, version: s_currentApiVersion, content: content, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task UpdateIdentityAsync(Identity identity, object userState, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "UpdateIdentity")) + { + ArgumentUtility.CheckForNull(identity, "identity"); + + HttpContent content = new ObjectContent(identity, base.Formatter); + return await SendAsync(HttpMethod.Put, IdentityResourceIds.Identity, new { identityId = identity.Id }, s_currentApiVersion, content, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task SwapIdentityAsync(Guid id1, Guid id2, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "SwapIdentity")) + { + ArgumentUtility.CheckForEmptyGuid(id1, "id1"); + ArgumentUtility.CheckForEmptyGuid(id2, "id2"); + + HttpContent content = new ObjectContent(typeof(SwapIdentityInfo), new SwapIdentityInfo(id1, id2), this.Formatter); + + return await SendAsync(HttpMethod.Post, IdentityResourceIds.SwapLocationId, version: s_currentApiVersion, content: content, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + //REST USAGE NON-STANDARD: Get operations on the same endpoint should return the same resources. This is a different + //resource. 
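+ // A minimal usage sketch for the change-feed overloads below, assuming a configured
+ // IdentityHttpClient named identityClient and previously stored sequence ids
+ // (hypothetical variable names):
+ //
+ //   var changes = await identityClient.GetIdentityChangesAsync(lastIdentitySequenceId, lastGroupSequenceId, scopeId);
+ //
+ // Callers would persist the sequence ids reported back in 'changes' and pass them on the next poll.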
+ public async Task GetIdentityChangesAsync(int identitySequenceId, int groupSequenceId, Guid scopeId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + int unspecifiedSequenceId = -1; + return await this.GetIdentityChangesAsync(identitySequenceId, groupSequenceId, unspecifiedSequenceId, scopeId, userState, cancellationToken).ConfigureAwait(false); + } + + public async Task GetIdentityChangesAsync(int identitySequenceId, int groupSequenceId, int organizationIdentitySequenceId, Guid scopeId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + return await this.GetIdentityChangesAsync(identitySequenceId, groupSequenceId, organizationIdentitySequenceId, 0, scopeId, userState, cancellationToken).ConfigureAwait(false); + } + + public async Task GetIdentityChangesAsync(int identitySequenceId, int groupSequenceId, int organizationIdentitySequenceId, int pageSize, Guid scopeId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "GetIdentityChanges")) + { + List> query = new List>(); + query.Add(QueryParameters.IdentitySequenceId, identitySequenceId.ToString()); + query.Add(QueryParameters.GroupSequenceId, groupSequenceId.ToString()); + query.Add(QueryParameters.OrgIdentitySequenceId, organizationIdentitySequenceId.ToString()); + query.Add(QueryParameters.PageSize, pageSize.ToString()); + query.Add(QueryParameters.ScopeId, scopeId.ToString("N")); + + return await SendAsync(HttpMethod.Get, IdentityResourceIds.Identity, version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task> GetUserIdentityIdsByDomainIdAsync( + Guid domainId, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, nameof(GetUserIdentityIdsByDomainIdAsync))) + { + ArgumentUtility.CheckForEmptyGuid(domainId, nameof(domainId)); + var query = new List>(); + query.Add(QueryParameters.DomainId, domainId.ToString("N")); + return + await + SendAsync>( + method: HttpMethod.Get, + locationId: IdentityResourceIds.Identity, + version: s_currentApiVersion, + queryParameters: query, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + #endregion + + #region Operations on IdentitySelf Controller + + public async Task GetIdentitySelfAsync(object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "GetIdentitySelf")) + { + return await SendAsync(HttpMethod.Get, IdentityResourceIds.IdentitySelf, version: s_currentApiVersion, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + #endregion + + #region Operations on IdentityTenant Controller + + public async Task GetTenant(string tenantId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "GetTenant")) + { + //NOTE [RR]: Having to re-create ApiResourceLocation here since /_apis is currently not + //Anonymous and using underlying SendAsync<> overloads throws a ObjectNullRefernceException + //when a null credential, indicating anonymous request, is + var resourceLocation = new ApiResourceLocation + { + Id = 
IdentityResourceIds.IdentityTenant, + ResourceName = IdentityResourceIds.IdentityTenantResource, + RouteTemplate = "_apis/identities/tenant/{tenantId}", + ResourceVersion = 1, + MinVersion = new Version(1, 0), + MaxVersion = new Version(2, 0), + ReleasedVersion = new Version(0, 0) + }; + + using (var requestMessage = CreateRequestMessage(HttpMethod.Get, resourceLocation, new { tenantId = tenantId }, version: s_currentApiVersion)) + using (var client = new HttpClient()) + { + var response = await client.SendAsync(requestMessage, cancellationToken: cancellationToken); + response.EnsureSuccessStatusCode(); + return await response.Content.ReadAsAsync(new[] { this.Formatter }, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + } + + #endregion + + #region Operations on service identities controller + + public async Task CreateFrameworkIdentityAsync(FrameworkIdentityType identityType, string role, string identifier, string displayName, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "CreateServiceIdentity")) + { + if (identityType == FrameworkIdentityType.None) + { + throw new ArgumentException(CommonResources.EmptyStringNotAllowed(), "identityType"); + } + + ArgumentUtility.CheckStringForNullOrEmpty(displayName, "role"); + ArgumentUtility.CheckStringForNullOrEmpty(displayName, "identifier"); + ArgumentUtility.CheckStringForNullOrEmpty(displayName, "displayName"); + + HttpContent content = new ObjectContent( + typeof(FrameworkIdentityInfo), + new FrameworkIdentityInfo + { + IdentityType = identityType, + Role = role, + Identifier = identifier, + DisplayName = displayName + }, + this.Formatter); + + return await SendAsync(HttpMethod.Put, IdentityResourceIds.FrameworkIdentity, version: s_currentApiVersion, content: content, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + #endregion + + #region Operations on Groups Controller + public virtual async Task ListGroupsAsync(Guid[] scopeIds = null, bool recurse = false, bool deleted = false, IEnumerable propertyNameFilters = null, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "ListGroups")) + { + List> query = null; + + if (scopeIds != null || recurse != false || propertyNameFilters != null) + { + query = new List>(); + + if (scopeIds != null) + { + query.AddMultiple(QueryParameters.ScopeIds, scopeIds, (val) => val.ToString("N")); + } + + if (recurse != false) + { + query.Add(QueryParameters.Recurse, "true"); + } + + if (deleted != false) + { + query.Add(QueryParameters.Deleted, "true"); + } + + if (propertyNameFilters != null) + { + query.AddMultiple(QueryParameters.Properties, propertyNameFilters); + } + } + + return await SendAsync(HttpMethod.Get, IdentityResourceIds.Group, version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public Task DeleteGroupAsync(IdentityDescriptor descriptor, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + return DeleteGroupAsyncInternal(SerializeDescriptor(descriptor), userState, cancellationToken: cancellationToken); + } + + public Task DeleteGroupAsync(Guid groupId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + return 
DeleteGroupAsyncInternal(groupId.ToString(), userState, cancellationToken); + } + + public async Task CreateGroupsAsync(Guid scopeId, IList groups, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "CreateGroup")) + { + ArgumentUtility.CheckForEmptyGuid(scopeId, "scopeId"); + ArgumentUtility.CheckEnumerableForNullOrEmpty(groups, "groups"); + + HttpContent content = new ObjectContent(new CreateGroupsInfo(scopeId, groups), this.Formatter); + + return await SendAsync(HttpMethod.Post, IdentityResourceIds.Group, version: s_currentApiVersion, content: content, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + #endregion + + #region Operations on Scopes Controller + public async Task GetScopeAsync(string scopeName, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "GetScope")) + { + ArgumentUtility.CheckStringForNullOrEmpty(scopeName, "scopeName"); + + List> query = new List>(); + query.Add(QueryParameters.ScopeName, scopeName); + + return await SendAsync(HttpMethod.Get, IdentityResourceIds.Scope, version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task GetScopeAsync(Guid scopeId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "GetScopeById")) + { + ArgumentUtility.CheckForEmptyGuid(scopeId, "scopeId"); + + return await SendAsync(HttpMethod.Get, IdentityResourceIds.Scope, new { scopeId = scopeId }, version: s_currentApiVersion, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task CreateScopeAsync(Guid scopeId, Guid parentScopeId, GroupScopeType scopeType, string scopeName, string adminGroupName, string adminGroupDescription, Guid creatorId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "CreateScope")) + { + ArgumentUtility.CheckForEmptyGuid(scopeId, "scopeId"); + + //REST USAGE NON-STANDARD: A POST to create a resource should be a reprensentation of the resource being created, in this case an IdentityScope. However, + //the create operation takes parameters not present in the new resource: specifically the adminGroupName and adminGroupDescription. We either need + //to set these in a different way -- on the correct resource -- or include them as part of IdentityScope. 
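+ // For illustration, the body sent below is a CreateScopeInfo built from this method's
+ // arguments rather than an IdentityScope resource; serialized it is roughly
+ //   { "parentScopeId": ..., "scopeType": ..., "scopeName": ...,
+ //     "adminGroupName": ..., "adminGroupDescription": ..., "creatorId": ... }
+ // (wire field names are an assumption inferred from the CreateScopeInfo constructor parameters).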
+ + // Constructor Validates params + CreateScopeInfo info = new CreateScopeInfo(parentScopeId, scopeType, scopeName, adminGroupName, adminGroupDescription, creatorId); + + HttpContent content = new ObjectContent(info, this.Formatter); + + return await SendAsync(HttpMethod.Put, IdentityResourceIds.Scope, new { scopeId = scopeId }, version: s_currentApiVersion, content: content, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task RenameScopeAsync(Guid scopeId, string newName, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "RenameScope")) + { + ArgumentUtility.CheckForEmptyGuid(scopeId, "scopeId"); + ArgumentUtility.CheckStringForNullOrEmpty(newName, "newName"); + + IdentityScope rename = new IdentityScope(scopeId, newName); + HttpContent content = new ObjectContent(rename, this.Formatter); + + return await SendAsync(new HttpMethod("PATCH"), IdentityResourceIds.Scope, new { scopeId = scopeId }, version: s_currentApiVersion, content: content, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + // code for version 2 of the API - lets switch to this after the new api version has been in for a sprint + //ArgumentUtility.CheckForEmptyGuid(scopeId, "scopeId"); + //ArgumentUtility.CheckStringForNullOrEmpty(newName, "newName"); + //return await UpdateScopeAsync(scopeId, nameof(IdentityScope.Name), newName, userState, cancellationToken); + } + + public async Task DeleteScopeAsync(Guid scopeId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "DeleteScope")) + { + ArgumentUtility.CheckForEmptyGuid(scopeId, "scopeId"); + + return await SendAsync(HttpMethod.Delete, IdentityResourceIds.Scope, new { scopeId = scopeId }, version: s_currentApiVersion, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task RestoreGroupScopeAsync(Guid scopeId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckForEmptyGuid(scopeId, "scopeId"); + return await UpdateScopeAsync(scopeId, nameof(IdentityScope.IsActive), true, userState, cancellationToken).ConfigureAwait(false); + } + + private async Task UpdateScopeAsync(Guid scopeId, String property, object value, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using(new OperationScope(IdentityResourceIds.AreaName, "UpdateScope")) + { + JsonPatchDocument patchDocument = new JsonPatchDocument{ + new JsonPatchOperation + { + Operation = WebApi.Patch.Operation.Replace, + Path = "/" + property, + Value = value + } + }; + + HttpContent content = new ObjectContent(patchDocument, new VssJsonMediaTypeFormatter(true), "application/json-patch+json"); + + return await SendAsync(new HttpMethod("PATCH"), IdentityResourceIds.Scope, new { scopeId = scopeId }, version: new ApiResourceVersion(5.0, 2), content: content, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + #endregion + + #region Operations on Members\MembersOf Controllers\ + //These methods have analogs on the Members\MemberOf controllers but are unused... 
+ //Task ReadMembershipsAsync(String memberId, QueryMembership queryMembership = QueryMembership.Direct) + + //This one called by IsMember, but not exposed directly + //Task ReadMembershipsAsync(String memberId, String containerId, QueryMembership queryMembership = QueryMembership.Direct) + + //Task ReadMembersAsync(String containerId, QueryMembership queryMembership = QueryMembership.Direct) + + //Task ReadMemberAsync(String containerId, String memberId, QueryMembership queryMembership = QueryMembership.Direct) + + public Task AddMemberToGroupAsync(IdentityDescriptor containerId, Guid memberId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + return AddMemberToGroupAsyncInternal(new { containerId = SerializeDescriptor(containerId), memberId = memberId }, new List>(), userState, cancellationToken); + } + + public Task AddMemberToGroupAsync(IdentityDescriptor containerId, IdentityDescriptor memberId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + //REST USAGE NON-STANDARD: This should not be a query param, as this ends up being a PUT, which should address the resource directly + // (and also see the internal method on non-standard use of PUT). But the memberId may contain a colon, which will cause it to + //be rejected by ASP.NET as dangerous (even if escaped) so doing this as a workaround. + List> query = new List>(); + query.Add(QueryParameters.MemberId, SerializeDescriptor(memberId)); + + return AddMemberToGroupAsyncInternal(new { containerId = SerializeDescriptor(containerId) }, query, userState, cancellationToken); + } + + public async Task RemoveMemberFromGroupAsync(IdentityDescriptor containerId, IdentityDescriptor memberId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "RemoveMemberFromGroup")) + { + //REST USAGE NON-STANDARD: This should not be a query param, as this DELETE which should address the resource directly + //but the memberId may contain a colon, which will cause it to be rejected by ASP.NET as dangerous (even if escaped) so doing + //this as a workaround. + List> query = new List>(); + query.Add(QueryParameters.MemberId, SerializeDescriptor(memberId)); + + return await SendAsync(HttpMethod.Delete, IdentityResourceIds.Member, new { containerId = SerializeDescriptor(containerId) }, version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task IsMember(IdentityDescriptor containerId, IdentityDescriptor memberId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "IsMember")) + { + List> query = new List>(); + query.Add(QueryParameters.QueryMembership, QueryMembership.Expanded.ToString()); + + //Consider: Can this actually return null? This is how IdentityHttpComponent works... 
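+ // A minimal usage sketch, assuming an IdentityHttpClient named identityClient and two existing
+ // descriptors (hypothetical variable names):
+ //   bool isMember = await identityClient.IsMember(groupDescriptor, userDescriptor);
+ // Both descriptors are serialized into the route as "IdentityType;Identifier" (see
+ // SerializeDescriptor below), and a non-null result from the MemberOf query is treated as
+ // membership.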
+ IdentityDescriptor result = await SendAsync(HttpMethod.Get, IdentityResourceIds.MemberOf, + new { memberId = SerializeDescriptor(memberId), containerId = SerializeDescriptor(containerId) }, + version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + + return result != null; + } + } + #endregion + + #region Operations on IdentitySnapshot controller + public async Task GetIdentitySnapshotAsync(Guid scopeId, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "GetIdentitySnapshot")) + { + return await SendAsync(HttpMethod.Get, IdentityResourceIds.IdentitySnapshot, version: s_currentApiVersion, routeValues: new { scopeId = scopeId }, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + #endregion + + #region Operations on SignoutToken controller + public async Task GetSignoutToken(object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "GetSignoutToken")) + { + return await SendAsync( + HttpMethod.Get, + IdentityResourceIds.SignoutToken, + version: s_currentApiVersion, + routeValues: new object { }, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + #endregion + + #region Operations on SignedInToken controller + public async Task GetSignedInToken(object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "GetSignedInToken")) + { + return await SendAsync( + HttpMethod.Get, + IdentityResourceIds.SignedInToken, + version: s_currentApiVersion, + routeValues: new object { }, + userState: userState, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + #endregion + + #region Operations on IdentitySequenceId Controller + public async Task GetMaxSequenceIdAsync(object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "GetMaxSequenceId")) + { + return await SendAsync(HttpMethod.Get, IdentityResourceIds.IdentityMaxSequenceId, version: s_currentApiVersion, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + #endregion Operations on IdentitySequenceId Controller + + #region Operations on Claims Controller + public async Task CreateOrBindIdentity(Identity sourceIdentity, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "CreateOrBindWithClaims")) + { + ArgumentUtility.CheckForNull(sourceIdentity, nameof(sourceIdentity)); + ArgumentUtility.CheckForNull(sourceIdentity.Descriptor, nameof(sourceIdentity.Descriptor)); + + HttpContent content = new ObjectContent(sourceIdentity, this.Formatter); + + return await SendAsync(HttpMethod.Put, + IdentityResourceIds.Claims, + version: s_currentApiVersion, + userState: userState, + content: content, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + #endregion Operations on Claims Controller + + #region Operations on IdentityDescriptor Controller + /// + /// [Preview API] + /// + /// + /// + /// + /// The cancellation token to cancel operation. + public async Task GetDescriptorByIdAsync( + Guid id, + bool? 
isMasterId = null, + object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + object routeValues = new { id = id }; + + var queryParams = new List>(); + if (isMasterId != null) + { + queryParams.Add("isMasterId", isMasterId.Value.ToString()); + } + + return await SendAsync( + HttpMethod.Get, + IdentityResourceIds.DescriptorsResourceLocationId, + routeValues: routeValues, + version: new ApiResourceVersion("3.2-preview.1"), + queryParameters: queryParams, + userState: userState, + cancellationToken: + cancellationToken).ConfigureAwait(false); + } + #endregion Operations on IdentityDescriptor Controller + + #region Private Helpers + private async Task ReadIdentitiesAsyncInternal(List> searchQuery, QueryMembership queryMembership, IEnumerable propertyNameFilters, bool includeRestrictedVisibility, RequestHeadersContext requestHeadersContext, object userState, CancellationToken cancellationToken) + { + using (new OperationScope(IdentityResourceIds.AreaName, "ReadIdentities")) + { + AppendQueryString(searchQuery, queryMembership, propertyNameFilters, includeRestrictedVisibility); + var additionalHeaders = RequestHeadersContext.HeadersUtils.PopulateRequestHeaders(requestHeadersContext); + + return await SendAsync(HttpMethod.Get, additionalHeaders, IdentityResourceIds.Identity, version: s_currentApiVersion, queryParameters: searchQuery, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + private async Task ReadIdentitiesBatchAsyncInternal( + IList socialDescriptors, + QueryMembership queryMembership, + IEnumerable propertyNameFilters, + bool includeRestrictedVisibility, + RequestHeadersContext requestHeadersContext, + object userState, CancellationToken cancellationToken) + { + using (new OperationScope(IdentityResourceIds.AreaName, "ReadIdentitiesBatch")) + { + IdentityBatchInfo info = new IdentityBatchInfo(socialDescriptors, queryMembership, propertyNameFilters, includeRestrictedVisibility); + + HttpContent content = new ObjectContent(info, base.Formatter); + + var queryParams = new List>() + { + {IdentityBatchTelemetryConstants.QueryMembershipHint, queryMembership.ToString()}, + {IdentityBatchTelemetryConstants.FlavorHint, IdentityBatchTelemetryConstants.BySocialDescriptorFlavor }, + {IdentityBatchTelemetryConstants.CountHint, (socialDescriptors?.Count ?? 
0).ToString() }, + }; + + var additionalHeaders = RequestHeadersContext.HeadersUtils.PopulateRequestHeaders(requestHeadersContext); + + return await SendAsync( + HttpMethod.Post, + additionalHeaders, + IdentityResourceIds.IdentityBatch, + version: s_currentApiVersion, + content: content, + userState: userState, + queryParameters: queryParams, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + private async Task ReadIdentitiesBatchAsyncInternal( + IList subjectDescriptors, + QueryMembership queryMembership, + IEnumerable propertyNameFilters, + bool includeRestrictedVisibility, + RequestHeadersContext requestHeadersContext, + object userState, CancellationToken cancellationToken) + { + using (new OperationScope(IdentityResourceIds.AreaName, "ReadIdentitiesBatch")) + { + IdentityBatchInfo info = new IdentityBatchInfo(subjectDescriptors, queryMembership, propertyNameFilters, includeRestrictedVisibility); + + HttpContent content = new ObjectContent(info, base.Formatter); + + var queryParams = new List>() + { + {IdentityBatchTelemetryConstants.QueryMembershipHint, queryMembership.ToString()}, + {IdentityBatchTelemetryConstants.FlavorHint, IdentityBatchTelemetryConstants.BySubjectDescriptorFlavor }, + {IdentityBatchTelemetryConstants.CountHint, (subjectDescriptors?.Count ?? 0).ToString() }, + }; + + var additionalHeaders = RequestHeadersContext.HeadersUtils.PopulateRequestHeaders(requestHeadersContext); + + return await SendAsync( + HttpMethod.Post, + additionalHeaders, + IdentityResourceIds.IdentityBatch, + version: s_currentApiVersion, + content: content, + userState: userState, + queryParameters: queryParams, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + private async Task ReadIdentitiesBatchAsyncInternal( + IList descriptors, + QueryMembership queryMembership, + IEnumerable propertyNameFilters, + bool includeRestrictedVisibility, + RequestHeadersContext requestHeadersContext, + object userState, CancellationToken cancellationToken) + { + using (new OperationScope(IdentityResourceIds.AreaName, "ReadIdentitiesBatch")) + { + IdentityBatchInfo info = new IdentityBatchInfo(descriptors, queryMembership, propertyNameFilters, includeRestrictedVisibility); + + HttpContent content = new ObjectContent(info, base.Formatter); + + var queryParams = new List>() + { + {IdentityBatchTelemetryConstants.QueryMembershipHint, queryMembership.ToString()}, + {IdentityBatchTelemetryConstants.FlavorHint, IdentityBatchTelemetryConstants.ByDescriptorFlavor }, + {IdentityBatchTelemetryConstants.CountHint, (descriptors?.Count ?? 
0).ToString() }, + }; + + var additionalHeaders = RequestHeadersContext.HeadersUtils.PopulateRequestHeaders(requestHeadersContext); + + return await SendAsync( + HttpMethod.Post, + additionalHeaders, + IdentityResourceIds.IdentityBatch, + version: s_currentApiVersion, + content: content, + userState: userState, + queryParameters: queryParams, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + private async Task ReadIdentitiesBatchAsyncInternal( + IList identityIds, + QueryMembership queryMembership, + IEnumerable propertyNameFilters, + bool includeRestrictedVisibility, + object userState, + RequestHeadersContext requestHeadersContext, + CancellationToken cancellationToken) + { + using (new OperationScope(IdentityResourceIds.AreaName, "ReadIdentitiesBatch")) + { + IdentityBatchInfo info = new IdentityBatchInfo(identityIds, queryMembership, propertyNameFilters, includeRestrictedVisibility); + + HttpContent content = new ObjectContent(info, base.Formatter); + + var queryParams = new List> + { + {IdentityBatchTelemetryConstants.QueryMembershipHint, queryMembership.ToString()}, + {IdentityBatchTelemetryConstants.FlavorHint, IdentityBatchTelemetryConstants.ByIdFlavor }, + {IdentityBatchTelemetryConstants.CountHint, (identityIds?.Count ?? 0).ToString() }, + }; + + var additionalHeaders = RequestHeadersContext.HeadersUtils.PopulateRequestHeaders(requestHeadersContext); + + return await SendAsync( + HttpMethod.Post, + additionalHeaders, + IdentityResourceIds.IdentityBatch, + version: s_currentApiVersion, + content: content, + userState: userState, + queryParameters: queryParams, + cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + //Separate endpoint for identity and identities + private async Task ReadIdentityAsyncInternal( + string identityId, + QueryMembership queryMembership, + IEnumerable propertyNameFilters, + object userState, CancellationToken cancellationToken) + { + using (new OperationScope(IdentityResourceIds.AreaName, "ReadIdentity")) + { + var query = new List>(); + AppendQueryString(query, queryMembership, propertyNameFilters, false); + + return await SendAsync(HttpMethod.Get, IdentityResourceIds.Identity, new { identityId = identityId }, version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + private async Task DeleteGroupAsyncInternal(string groupId, object userState, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "DeleteGroup")) + { + return await SendAsync(HttpMethod.Delete, IdentityResourceIds.Group, new { groupId = groupId }, version: s_currentApiVersion, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task AddMemberToGroupAsyncInternal(object routeParams, IEnumerable> query, object userState = null, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(IdentityResourceIds.AreaName, "AddMemberToGroup")) + { + //REST USAGE NON-STANDARD: This is modeled as a PUT operation, but contains no body. PUT should create or replace the resource at this + //address, but in this case, there is no resource, it is adding a link between resources. 
This should be done differently + return await SendAsync(HttpMethod.Put, IdentityResourceIds.Member, routeParams, version: s_currentApiVersion, queryParameters: query, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + private void AppendQueryString(List> queryParams, QueryMembership queryMembership, IEnumerable propertyNameFilters, bool includeRestrictedVisibility) + { + queryParams.Add(QueryParameters.QueryMembership, queryMembership.ToString()); + + queryParams.AddMultiple(QueryParameters.Properties, propertyNameFilters); + + if (includeRestrictedVisibility) + { + queryParams.Add(QueryParameters.IncludeRestrictedVisibility, "true"); + } + } + + private static string SerializeDescriptor(IdentityDescriptor descriptor) + { + if (descriptor == null) + { + return string.Empty; + } + else + { + return string.Join(";", descriptor.IdentityType, descriptor.Identifier); + } + } + + + #endregion + + /// + /// Exceptions for account errors + /// + protected override IDictionary TranslatedExceptions + { + get + { + return s_translatedExceptions; + } + } + + private static class IdentityBatchTelemetryConstants + { + public const string QueryMembershipHint = "queryMembership"; + public const string FlavorHint = "flavor"; + public const string CountHint = "count"; + + public const string ByIdFlavor = "id"; + public const string ByDescriptorFlavor = "descriptor"; + public const string BySubjectDescriptorFlavor = "subjectDescriptor"; + public const string BySocialDescriptorFlavor = "socialDescriptor"; + } + + private static Dictionary s_translatedExceptions; + private static readonly ApiResourceVersion s_currentApiVersion; + private const int maxDescriptors = 5 /* 256 for descriptor + 64 for type + 1 */; + private const int maxIds = 50; + } +} diff --git a/src/Sdk/WebApi/WebApi/HttpClients/LocationHttpClient.cs b/src/Sdk/WebApi/WebApi/HttpClients/LocationHttpClient.cs new file mode 100644 index 00000000000..ab6f465ed99 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/HttpClients/LocationHttpClient.cs @@ -0,0 +1,184 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Services.Location.Client +{ + [ClientCircuitBreakerSettings(timeoutSeconds: 15, failurePercentage: 80)] + [ClientCancellationTimeout(timeoutSeconds: 30)] + public class LocationHttpClient : VssHttpClientBase + { + static LocationHttpClient() + { + s_translatedExceptions = new Dictionary(); + s_translatedExceptions.Add("ServiceDefinitionDoesNotExistException", typeof(ServiceDefinitionDoesNotExistException)); + s_translatedExceptions.Add("InvalidAccessPointException", typeof(InvalidAccessPointException)); + s_translatedExceptions.Add("InvalidServiceDefinitionException", typeof(InvalidServiceDefinitionException)); + s_translatedExceptions.Add("ParentDefinitionNotFoundException", typeof(ParentDefinitionNotFoundException)); + s_translatedExceptions.Add("CannotChangeParentDefinitionException", typeof(CannotChangeParentDefinitionException)); + s_translatedExceptions.Add("ActionDeniedBySubscriberException", typeof(ActionDeniedBySubscriberException)); + } + + public LocationHttpClient(Uri baseUrl, VssCredentials credentials) + : base(baseUrl, credentials) + { + } + + public LocationHttpClient(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings) 
+ : base(baseUrl, credentials, settings) + { + } + + public LocationHttpClient(Uri baseUrl, VssCredentials credentials, params DelegatingHandler[] handlers) + : base(baseUrl, credentials, handlers) + { + } + + public LocationHttpClient(Uri baseUrl, VssCredentials credentials, VssHttpRequestSettings settings, params DelegatingHandler[] handlers) + : base(baseUrl, credentials, settings, handlers) + { + } + + public LocationHttpClient(Uri baseUrl, HttpMessageHandler pipeline, bool disposeHandler) + : base(baseUrl, pipeline, disposeHandler) + { + } + + public async Task GetConnectionDataAsync(ConnectOptions connectOptions, Int64 lastChangeId, CancellationToken cancellationToken = default(CancellationToken), Object userState = null) + { + using (new OperationScope(LocationResourceIds.LocationServiceArea, "GetConnectionData")) + { + var uri = new Uri(PathUtility.Combine(BaseAddress.GetLeftPart(UriPartial.Path), connectSubUrl)); + var uriBuilder = new UriBuilder(uri) { Query = BaseAddress.Query }; + + var query = new List> + { + new KeyValuePair("connectOptions", ((Int32)connectOptions).ToString(CultureInfo.InvariantCulture)), + new KeyValuePair("lastChangeId", ((Int32)lastChangeId).ToString(CultureInfo.InvariantCulture)), + new KeyValuePair("lastChangeId64", lastChangeId.ToString(CultureInfo.InvariantCulture)) + }; + + uri = uriBuilder.Uri.AppendQuery(query); + + var message = new HttpRequestMessage(HttpMethod.Get, uri.ToString()); + message.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); + + return await SendAsync(message, userState, cancellationToken).ConfigureAwait(false); + } + } + + public async Task UpdateServiceDefinitionsAsync(IEnumerable definitions, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(LocationResourceIds.LocationServiceArea, "UpdateServiceDefinitions")) + { + ArgumentUtility.CheckEnumerableForNullOrEmpty(definitions, "definitions"); + + HttpContent content = new ObjectContent>>(new VssJsonCollectionWrapper>(definitions), base.Formatter); + await SendAsync(new HttpMethod("PATCH"), LocationResourceIds.ServiceDefinitions, null, s_currentApiVersion, content, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task DeleteServiceDefinitionAsync(String serviceType, Guid identifier, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(LocationResourceIds.LocationServiceArea, "DeleteServiceDefinitions")) + { + return await SendAsync(HttpMethod.Delete, LocationResourceIds.ServiceDefinitions, new { serviceType = serviceType, identifier = identifier }, s_currentApiVersion, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task> GetServiceDefinitionsAsync() + { + using (new OperationScope(LocationResourceIds.LocationServiceArea, "GetServiceDefinitions")) + { + return await SendAsync>(HttpMethod.Get, LocationResourceIds.ServiceDefinitions, null, s_currentApiVersion).ConfigureAwait(false); + } + } + + public async Task> GetServiceDefinitionsAsync(String serviceType) + { + using (new OperationScope(LocationResourceIds.LocationServiceArea, "GetServiceDefinitions")) + { + return await SendAsync>(HttpMethod.Get, LocationResourceIds.ServiceDefinitions, new { serviceType = serviceType }, s_currentApiVersion).ConfigureAwait(false); + } + } + + public Task GetServiceDefinitionAsync(String serviceType, Guid identifier, CancellationToken cancellationToken = default(CancellationToken)) + { + return 
GetServiceDefinitionAsync(serviceType, identifier, allowFaultIn: true, previewFaultIn: false, cancellationToken: cancellationToken); + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public async Task GetServiceDefinitionAsync(String serviceType, Guid identifier, Boolean allowFaultIn, Boolean previewFaultIn, CancellationToken cancellationToken = default(CancellationToken)) + { + using (new OperationScope(LocationResourceIds.LocationServiceArea, "GetServiceDefinitions")) + { + List> query = new List>(); + + if (!allowFaultIn) + { + query.Add("allowFaultIn", Boolean.FalseString); + } + + if (previewFaultIn) + { + if (!allowFaultIn) + { + throw new InvalidOperationException("Cannot preview a service definition fault in if we do not allow the fault in."); + } + + query.Add("previewFaultIn", Boolean.TrueString); + } + + return await SendAsync(HttpMethod.Get, LocationResourceIds.ServiceDefinitions, new { serviceType = serviceType, identifier = identifier }, s_currentApiVersion, queryParameters: query, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public async Task FlushSpsServiceDefinitionAsync(Guid hostId, CancellationToken cancellationToken = default(CancellationToken)) + { + // Used when migrating an SPS host to update all registered service definitions across other VSO instances. + using (new OperationScope(LocationResourceIds.LocationServiceArea, "FlushSpsServiceDefinition")) + { + return await SendAsync(HttpMethod.Put, LocationResourceIds.SpsServiceDefinition, new { hostId = hostId }, s_currentApiVersion, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task> GetResourceAreasAsync() + { + using (new OperationScope(LocationResourceIds.LocationServiceArea, "GetResourceAreas")) + { + return await SendAsync>(HttpMethod.Get, LocationResourceIds.ResourceAreas, null, new ApiResourceVersion("3.2-preview.1")).ConfigureAwait(false); + } + } + + public async Task GetResourceAreaAsync(Guid areaId) + { + using (new OperationScope(LocationResourceIds.LocationServiceArea, "GetResourceAreas")) + { + return await SendAsync(HttpMethod.Get, LocationResourceIds.ResourceAreas, new { areaId = areaId } , new ApiResourceVersion("3.2-preview.1")).ConfigureAwait(false); + } + } + + /// + /// Exceptions for location errors + /// + protected override IDictionary TranslatedExceptions + { + get + { + return s_translatedExceptions; + } + } + + private static Dictionary s_translatedExceptions; + private const String connectSubUrl = "_apis/connectionData"; + protected static readonly ApiResourceVersion s_currentApiVersion = new ApiResourceVersion(1.0); + } +} diff --git a/src/Sdk/WebApi/WebApi/IdentityRef.cs b/src/Sdk/WebApi/WebApi/IdentityRef.cs new file mode 100644 index 00000000000..106611864d3 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/IdentityRef.cs @@ -0,0 +1,124 @@ +using System; +using System.ComponentModel; +using System.Runtime.Serialization; +using GitHub.Services.Common; +using GitHub.Services.Graph.Client; +using GitHub.Services.WebApi.Xml; +using Newtonsoft.Json; + +namespace GitHub.Services.WebApi +{ + [JsonObject(MemberSerialization = MemberSerialization.OptIn)] + [XmlSerializableDataContract(EnableCamelCaseNameCompat = true)] + public class IdentityRef : GraphSubjectBase, ISecuredObject + { + // The following "new" properties are inherited from the base class, + // but are reimplemented with public setters for back compat. 
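As an aside on the back-compat technique used here: shadowing an inherited property with the "new" keyword and delegating to the base member widens the setter's accessibility without changing the wire format or breaking existing call sites. A minimal sketch of the pattern, using hypothetical BaseRef/CompatRef types rather than the SDK classes:

// Hypothetical types illustrating the "new"-property delegation pattern; not part of the SDK.
public class BaseRef
{
    // Base type only lets derived types assign the value.
    public string DisplayName { get; protected set; }
}

public class CompatRef : BaseRef
{
    // Re-expose the inherited property with a public setter so older callers
    // and serializers that assign DisplayName directly keep compiling and working.
    public new string DisplayName
    {
        get { return base.DisplayName; }
        set { base.DisplayName = value; }
    }
}

IdentityRef applies the same delegation to the GraphSubjectBase members declared below.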
+ + public new SubjectDescriptor Descriptor + { + get { return base.Descriptor; } + set { base.Descriptor = value; } + } + + public new string DisplayName + { + get { return base.DisplayName; } + set { base.DisplayName = value; } + } + + public new string Url + { + get { return base.Url; } + set { base.Url = value; } + } + + public new ReferenceLinks Links + { + get { return base.Links; } + set { base.Links = value; } + } + + [DataMember(Name = "id")] + [JsonProperty(PropertyName = "id")] + public String Id { get; set; } + + // Deprecated. See https://dev.azure.com/mseng/VSOnline/_wiki/wikis/VSOnline.wiki?wikiVersion=GBwikiMaster&pagePath=%2FTeam%20Pages%2FPipelines%2FPublic%20projects&anchor=obsolete-identity-fields + /// + /// Deprecated - use Domain+PrincipalName instead + /// + [DataMember(Name = "uniqueName", EmitDefaultValue = false)] + [JsonProperty(PropertyName = "uniqueName", DefaultValueHandling = DefaultValueHandling.Ignore)] + [JsonConverter(typeof(DefaultValueOnPublicAccessJsonConverter))] + public String UniqueName { get; set; } + + /// + /// Deprecated - Can be retrieved by querying the Graph user referenced in the "self" entry of the IdentityRef "_links" dictionary + /// + [DataMember(Name = "directoryAlias", EmitDefaultValue = false)] + [JsonProperty(PropertyName = "directoryAlias", DefaultValueHandling = DefaultValueHandling.Ignore)] + [JsonConverter(typeof(DefaultValueOnPublicAccessJsonConverter))] + public String DirectoryAlias { get; set; } + + /// + /// Deprecated - not in use in most preexisting implementations of ToIdentityRef + /// + [DataMember(Name = "profileUrl", EmitDefaultValue = false)] + [JsonProperty(PropertyName = "profileUrl", DefaultValueHandling = DefaultValueHandling.Ignore)] + [JsonConverter(typeof(DefaultValueOnPublicAccessJsonConverter))] + public String ProfileUrl { get; set; } + + /// + /// Deprecated - Available in the "avatar" entry of the IdentityRef "_links" dictionary + /// + [DataMember(Name = "imageUrl", EmitDefaultValue = false)] + [JsonProperty(PropertyName = "imageUrl", DefaultValueHandling = DefaultValueHandling.Ignore)] + [JsonConverter(typeof(DefaultValueOnPublicAccessJsonConverter))] + public String ImageUrl { get; set; } + + /// + /// Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsGroupType) + /// + [DataMember(Name = "isContainer", EmitDefaultValue = false)] + [JsonProperty(PropertyName = "isContainer", DefaultValueHandling = DefaultValueHandling.Ignore)] + public Boolean IsContainer { get; set; } + + /// + /// Deprecated - Can be inferred from the subject type of the descriptor (Descriptor.IsAadUserType/Descriptor.IsAadGroupType) + /// + [DataMember(Name = "isAadIdentity", EmitDefaultValue = false)] + [JsonProperty(PropertyName = "isAadIdentity", DefaultValueHandling = DefaultValueHandling.Ignore)] + public Boolean IsAadIdentity { get; set; } + + /// + /// Deprecated - Can be retrieved by querying the Graph membership state referenced in the "membershipState" entry of the GraphUser "_links" dictionary + /// + [DataMember(Name = "inactive", EmitDefaultValue = false)] + [JsonProperty(PropertyName = "inactive", DefaultValueHandling = DefaultValueHandling.Ignore)] + public Boolean Inactive { get; set; } + + [DataMember(Name = "isDeletedInOrigin", EmitDefaultValue = false)] + [JsonProperty(PropertyName = "isDeletedInOrigin", DefaultValueHandling = DefaultValueHandling.IgnoreAndPopulate)] + public Boolean IsDeletedInOrigin { get; set; } + + /// + /// This property is for xml compat only. 
+ /// + [DataMember(Name = "displayName", EmitDefaultValue = false)] + [JsonIgnore, Obsolete, EditorBrowsable(EditorBrowsableState.Never)] + public string DisplayNameForXmlSerialization { get => base.DisplayName; set => base.DisplayName = value; } + + /// + /// This property is for xml compat only. + /// + [DataMember(Name = "url", EmitDefaultValue = false)] + [JsonIgnore, Obsolete, EditorBrowsable(EditorBrowsableState.Never)] + public string UrlForXmlSerialization { get => base.Url; set => base.Url = value; } + + Guid ISecuredObject.NamespaceId => GraphSecurityConstants.NamespaceId; + + int ISecuredObject.RequiredPermissions => GraphSecurityConstants.ReadByPublicIdentifier; + + string ISecuredObject.GetToken() => GraphSecurityConstants.RefsToken; + } +} diff --git a/src/Sdk/WebApi/WebApi/JsonUtility.cs b/src/Sdk/WebApi/WebApi/JsonUtility.cs new file mode 100644 index 00000000000..b8cbd911dac --- /dev/null +++ b/src/Sdk/WebApi/WebApi/JsonUtility.cs @@ -0,0 +1,258 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.IO.Compression; +using System.Text; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.Services.WebApi +{ + public static class JsonUtility + { + public static JsonSerializer CreateJsonSerializer() + { + return JsonSerializer.Create(s_serializerSettings.Value); + } + + public static T FromString(String toDeserialize) + { + return FromString(toDeserialize, s_serializerSettings.Value); + } + + public static T FromString( + String toDeserialize, + JsonSerializerSettings settings) + { + if (String.IsNullOrEmpty(toDeserialize)) + { + return default(T); + } + + using (StringReader sr = new StringReader(toDeserialize)) + using (JsonTextReader jsonReader = new JsonTextReader(sr)) + { + JsonSerializer s = JsonSerializer.Create(settings); + return s.Deserialize(jsonReader); + } + } + + public static void Populate( + String toDeserialize, + Object target) + { + using (StringReader sr = new StringReader(toDeserialize)) + using (JsonTextReader jsonReader = new JsonTextReader(sr)) + { + JsonSerializer s = JsonSerializer.Create(s_serializerSettings.Value); + s.Populate(jsonReader, target); + } + } + + public static String ToString(Object toSerialize) + { + return ToString(toSerialize, false); + } + + public static String ToString(IList toSerialize) + { + if (toSerialize == null || toSerialize.Count == 0) + { + return null; + } + + return ToString(toSerialize, false); + } + + public static String ToString( + Object toSerialize, + Boolean indent) + { + if (toSerialize == null) + { + return null; + } + + StringBuilder sb = new StringBuilder(); + using (StringWriter sw = new StringWriter(sb)) + using (JsonTextWriter jsonWriter = new JsonTextWriter(sw)) + { + JsonSerializer s = JsonSerializer.Create(indent ? 
s_indentSettings.Value : s_serializerSettings.Value); + s.Serialize(jsonWriter, toSerialize); + } + + return sb.ToString(); + } + + public static T Deserialize(Stream streamToRead) + { + return Deserialize(streamToRead, false); + } + + public static T Deserialize( + Stream streamToRead, + Boolean leaveOpen) + { + if (streamToRead == null) + { + return default(T); + } + + using (StreamReader sr = new StreamReader(streamToRead, s_UTF8NoBOM, true, 80 * 1024, leaveOpen)) + using (JsonTextReader jsonReader = new JsonTextReader(sr)) + { + JsonSerializer s = JsonSerializer.Create(s_serializerSettings.Value); + return s.Deserialize(jsonReader); + } + } + + + public static T Deserialize(Byte[] toDeserialize) + { + return Deserialize(toDeserialize, s_serializerSettings.Value); + } + + public static T Deserialize( + Byte[] toDeserialize, + JsonSerializerSettings settings) + { + if (toDeserialize == null || toDeserialize.Length == 0) + { + return default(T); + } + + using (MemoryStream ms = new MemoryStream(toDeserialize)) + { + Stream streamToRead = ms; + if (IsGZipStream(toDeserialize)) + { + streamToRead = new GZipStream(ms, CompressionMode.Decompress); + } + + using (StreamReader sr = new StreamReader(streamToRead, s_UTF8NoBOM, true)) + using (JsonTextReader jsonReader = new JsonTextReader(sr)) + { + JsonSerializer s = JsonSerializer.Create(settings); + return s.Deserialize(jsonReader); + } + } + } + + public static JToken Map( + this JToken token, + Dictionary> mapFuncs) + { + // no map funcs, just clones + mapFuncs = mapFuncs ?? new Dictionary>(); + + Func mapperFunc; + + // process token + switch (token.Type) + { + case JTokenType.Array: + JArray newArray = new JArray(); + foreach (JToken item in token.Children()) + { + JToken child = item; + if (child.HasValues) + { + child = child.Map(mapFuncs); + } + + if (mapFuncs.TryGetValue(child.Type, out mapperFunc)) + { + child = mapperFunc(child); + } + + newArray.Add(child); + } + + return newArray; + + case JTokenType.Object: + JObject copy = new JObject(); + foreach (JProperty prop in token.Children()) + { + JToken child = prop.Value; + if (child.HasValues) + { + child = child.Map(mapFuncs); + } + + if (mapFuncs.TryGetValue(child.Type, out mapperFunc)) + { + child = mapperFunc(child); + } + + copy.Add(prop.Name, child); + } + + return copy; + + case JTokenType.String: + if (mapFuncs.TryGetValue(JTokenType.String, out mapperFunc)) + { + return mapperFunc(token); + } + + return token; + + case JTokenType.Boolean: + case JTokenType.Null: + case JTokenType.Guid: + return token; + + default: + throw new NotSupportedException(WebApiResources.UnexpectedTokenType()); + } + } + + public static Byte[] Serialize( + Object toSerialize, + Boolean compress = true) + { + return Serialize(toSerialize, compress, s_UTF8NoBOM); + } + + public static Byte[] Serialize( + Object toSerialize, + Boolean compress, + Encoding encoding) + { + if (toSerialize == null) + { + return null; + } + + using (MemoryStream ms = new MemoryStream()) + { + Stream streamToWrite = ms; + if (compress) + { + streamToWrite = new GZipStream(ms, CompressionMode.Compress); + } + + using (StreamWriter sw = new StreamWriter(streamToWrite, encoding ?? 
s_UTF8NoBOM)) + using (JsonTextWriter jsonWriter = new JsonTextWriter(sw)) + { + JsonSerializer s = JsonSerializer.Create(s_serializerSettings.Value); + s.Serialize(jsonWriter, toSerialize); + } + + return ms.ToArray(); + } + } + + private static Boolean IsGZipStream(Byte[] data) + { + return data != null && data.Length > FullGzipHeaderLength && data[0] == GzipHeader[0] && data[1] == GzipHeader[1]; + } + + private const Int32 FullGzipHeaderLength = 10; + private static readonly Byte[] GzipHeader = { 0x1F, 0x8B }; + private static readonly Encoding s_UTF8NoBOM = new UTF8Encoding(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: true); + private static readonly Lazy s_serializerSettings = new Lazy(() => new VssJsonMediaTypeFormatter().SerializerSettings); + private static readonly Lazy s_indentSettings = new Lazy(() => { var s = new VssJsonMediaTypeFormatter().SerializerSettings; s.Formatting = Formatting.Indented; return s; }); + } +} diff --git a/src/Sdk/WebApi/WebApi/Jwt/IJsonWebTokenHeaderProvider.cs b/src/Sdk/WebApi/WebApi/Jwt/IJsonWebTokenHeaderProvider.cs new file mode 100644 index 00000000000..831392961b5 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Jwt/IJsonWebTokenHeaderProvider.cs @@ -0,0 +1,10 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.Services.WebApi.Jwt +{ + internal interface IJsonWebTokenHeaderProvider + { + void SetHeaders(IDictionary headers); + } +} diff --git a/src/Sdk/WebApi/WebApi/Jwt/JsonWebToken.cs b/src/Sdk/WebApi/WebApi/Jwt/JsonWebToken.cs new file mode 100644 index 00000000000..73c507688f0 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Jwt/JsonWebToken.cs @@ -0,0 +1,687 @@ +using System; +using System.Collections.Generic; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Reflection; +using System.Runtime.Serialization; +using System.Security.Claims; +using System.Text; +using GitHub.Services.Common; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.Services.WebApi.Jwt +{ + //while the spec defined other possible algorithms + //in practice these are the only two that are used + [DataContract] + public enum JWTAlgorithm + { + [EnumMember] + None, + + [EnumMember] + HS256, + + [EnumMember] + RS256 + } + + //JsonWebToken is marked as DataContract so + //it can me nested in the Payload as an actor token + //note that the only member serialized is the + //EncodedToken property, and the OnDeserialized method + //decodes everything back out + [DataContract] + [JsonConverter(typeof(JsonWebTokenConverter))] + public sealed class JsonWebToken : IssuedToken + { + //Default lifetime for a JsonWebToken set to 300 seconds (5 minutes) + private const int DefaultLifetime = 300; + + #region Factory Methods + //We chose factory methods for creation, because creation + //generally involves signing the token, which is a bigger operation + //than just using a constructor implies + + //this method is used to instantiate the "self-signed" token for obtaining + //the access token + public static JsonWebToken Create(string issuer, string audience, DateTime validFrom, DateTime validTo, VssSigningCredentials credentials) + { + return Create(issuer, audience, validFrom, validTo, default(DateTime), null, null, null, credentials, allowExpiredCertificate: false); + } + + //use this method to instantiate the token with user information + public static JsonWebToken Create(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable additionalClaims, JsonWebToken actor) + { + //if you 
are calling this version of the method additionalClaims and actor cannot be null + ArgumentUtility.CheckForNull(additionalClaims, nameof(additionalClaims)); + ArgumentUtility.CheckForNull(actor, nameof(actor)); + + return Create(issuer, audience, validFrom, validTo, default(DateTime), additionalClaims, actor, null, null, allowExpiredCertificate: false); + } + + //use this method to instantiate the token with user information + public static JsonWebToken Create(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable additionalClaims, string actorToken) + { + //if you are calling this version of the method additionalClaims and actor cannot be null + ArgumentUtility.CheckForNull(additionalClaims, nameof(additionalClaims)); + ArgumentUtility.CheckStringForNullOrEmpty(actorToken, nameof(actorToken)); + + return Create(issuer, audience, validFrom, validTo, default(DateTime), additionalClaims, null, actorToken, null, allowExpiredCertificate: false); + } + + public static JsonWebToken Create(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable additionalClaims, VssSigningCredentials credentials) + { + //if you are calling this version claims can't be null + ArgumentUtility.CheckForNull(additionalClaims, nameof(additionalClaims)); + + return Create(issuer, audience, validFrom, validTo, default(DateTime), additionalClaims, null, null, credentials, allowExpiredCertificate: false); + } + + public static JsonWebToken Create(string issuer, string audience, DateTime validFrom, DateTime validTo, IEnumerable additionalClaims, VssSigningCredentials credentials, bool allowExpiredCertificate) + { + //if you are calling this version claims can't be null + ArgumentUtility.CheckForNull(additionalClaims, nameof(additionalClaims)); + + return Create(issuer, audience, validFrom, validTo, default(DateTime), additionalClaims, null, null, credentials, allowExpiredCertificate); + } + + public static JsonWebToken Create(string issuer, string audience, DateTime validFrom, DateTime validTo, DateTime issuedAt, IEnumerable additionalClaims, VssSigningCredentials credentials) + { + //if you are calling this version claims can't be null + ArgumentUtility.CheckForNull(additionalClaims, nameof(additionalClaims)); + + return Create(issuer, audience, validFrom, validTo, issuedAt, additionalClaims, null, null, credentials, allowExpiredCertificate: false); + } + + private static JsonWebToken Create(string issuer, string audience, DateTime validFrom, DateTime validTo, DateTime issuedAt, IEnumerable additionalClaims, JsonWebToken actor, string actorToken, VssSigningCredentials credentials, bool allowExpiredCertificate) + { + ArgumentUtility.CheckStringForNullOrEmpty(issuer, nameof(issuer)); + ArgumentUtility.CheckStringForNullOrEmpty(audience, nameof(audience)); // Audience isn't actually required... + + validFrom = validFrom == default(DateTime) ? DateTime.UtcNow : validFrom.ToUniversalTime(); + validTo = validTo == default(DateTime) ? DateTime.UtcNow + TimeSpan.FromSeconds(DefaultLifetime) : validTo.ToUniversalTime(); + //issuedAt is optional, and breaks certain scenarios if it is present, and breaks others if it is not. + //so only include it if it is explicitly set. + issuedAt = issuedAt == default(DateTime) ? 
default(DateTime) : issuedAt.ToUniversalTime(); + + JWTHeader header = GetHeader(credentials, allowExpiredCertificate); + JWTPayload payload = new JWTPayload(additionalClaims) { Issuer = issuer, Audience = audience, ValidFrom = validFrom, ValidTo = validTo, IssuedAt = issuedAt }; + + if (actor != null) + { + payload.Actor = actor; + } + else if (actorToken != null) + { + payload.ActorToken = actorToken; + } + + byte[] signature = GetSignature(header, payload, header.Algorithm, credentials); + + return new JsonWebToken(header, payload, signature); + } + + public static JsonWebToken Create(string jwtEncodedString) + { + ArgumentUtility.CheckStringForNullOrEmpty(jwtEncodedString, nameof(jwtEncodedString)); + + JValue value = new JValue(jwtEncodedString); + + return value.ToObject(); + } + #endregion + + #region .ctors + private JsonWebToken() { } + + private JsonWebToken(JWTHeader header, JWTPayload payload, byte[] signature) + { + ArgumentUtility.CheckForNull(header, nameof(header)); + ArgumentUtility.CheckForNull(payload, nameof(payload)); + //signature allowed to be null + _header = header; + _payload = payload; + _signature = signature; + } + #endregion + + #region Base Class Overrides + protected internal override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.S2S; + } + } + + internal override void ApplyTo(IHttpRequest request) + { + request.Headers.SetValue(Common.Internal.HttpHeaders.Authorization, $"Bearer {this.EncodedToken}"); + } + #endregion + + #region Private Fields + JWTHeader _header; + JWTPayload _payload; + byte[] _signature; + string _encodedToken; + #endregion + + #region Public Properties + public string TokenType => _header.Type; + + public JWTAlgorithm Algorithm => _header.Algorithm; + + public string CertificateThumbprint => _header.CertificateThumbprint; + + public string Audience => _payload.Audience; + + public string Issuer => _payload.Issuer; + + public string Subject => _payload.Subject; + + public string NameIdentifier => _payload.NameIdentifier; + + public string IdentityProvider => _payload.IdentityProvider; + + public DateTime ValidTo => _payload.ValidTo; + + public DateTime ValidFrom => _payload.ValidFrom; + + public DateTime IssuedAt => _payload.IssuedAt; + + public bool TrustedForDelegation => _payload.TrustedForDelegation; + + public JsonWebToken Actor => _payload.Actor; + + public string ApplicationIdentifier => _payload.ApplicationIdentifier; + + public string EncodedToken + { + get + { + if (string.IsNullOrEmpty(_encodedToken)) + { + _encodedToken = this.Encode(); + } + return _encodedToken; + } + private set + { + this._encodedToken = value; + } + } + + public string Scopes => _payload.Scopes; + + #endregion + + #region Public \ Protected Overrides + public override string ToString() + { + return string.Format("{0}.{1}", + this._header.ToString(), this._payload.ToString()); + } + #endregion + + #region Internal Properties + internal IDictionary Header => this._header; + + internal IDictionary Payload => this._payload; + + internal byte[] Signature => this._signature; + + #endregion + + #region Private Helpers + private static JWTHeader GetHeader(VssSigningCredentials credentials, bool allowExpired) + { + //note credentials are allowed to be null here, see ValidateSigningCredentials + JWTHeader header = new JWTHeader(); + + JWTAlgorithm alg = JsonWebTokenUtilities.ValidateSigningCredentials(credentials, allowExpired); + + header.Algorithm = alg; + + if (alg != JWTAlgorithm.None) + { + // Some signing credentials may need 
to set headers for the JWT + var jwtHeaderProvider = credentials as IJsonWebTokenHeaderProvider; + if (jwtHeaderProvider != null) + { + jwtHeaderProvider.SetHeaders(header); + } + } + + return header; + } + + private static byte[] GetSignature(JWTHeader header, JWTPayload payload, VssSigningCredentials credentials, bool allowExpired) + { + JWTAlgorithm alg = JsonWebTokenUtilities.ValidateSigningCredentials(credentials, allowExpired); + + return GetSignature(header, payload, alg, credentials); + } + + //if we alread have the alg, we assume that the creds have been validated already, + //to save the expense of validating twice in the create function... + private static byte[] GetSignature(JWTHeader header, JWTPayload payload, JWTAlgorithm alg, VssSigningCredentials signingCredentials) + { + if (alg == JWTAlgorithm.None) + { + return null; + } + + ArgumentUtility.CheckForNull(header, nameof(header)); + ArgumentUtility.CheckForNull(payload, nameof(payload)); + + string encoding = string.Format("{0}.{1}", header.JsonEncode(), payload.JsonEncode()); + + byte[] bytes = Encoding.UTF8.GetBytes(encoding); + + switch (alg) + { + case JWTAlgorithm.HS256: + case JWTAlgorithm.RS256: + return signingCredentials.SignData(bytes); + + default: + throw new InvalidOperationException(); + } + } + + private string Encode() + { + string encodedHeader = JsonWebTokenUtilities.JsonEncode(this._header); + string encodedPayload = JsonWebTokenUtilities.JsonEncode(this._payload); + string encodedSignature = null; + if (this._signature != null) + { + encodedSignature = this._signature.ToBase64StringNoPadding(); + } + + return string.Format("{0}.{1}.{2}", encodedHeader, encodedPayload, encodedSignature); + } + + //OnDeserialized never gets called by serializer because we have a custom converter, so call this + //from there... 
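For orientation, Encode() above and OnDeserialized() below both work against the standard three-part compact JWT layout: base64url(header) "." base64url(payload) "." base64url(signature), with base64 padding stripped. The following is a minimal sketch that splits and decodes such a string using only BCL types; PadBase64 is a local helper standing in for the SDK's FromBase64StringNoPadding extension, under the assumption that the no-padding encoding is base64url (plain unpadded base64 passes through it unchanged).

using System;
using System.Text;

static class JwtLayoutSketch
{
    // Restore padding and map URL-safe characters back to the standard base64 alphabet.
    static string PadBase64(string s) =>
        s.Replace('-', '+').Replace('_', '/').PadRight(s.Length + (4 - s.Length % 4) % 4, '=');

    public static void Dump(string encodedToken)
    {
        string[] parts = encodedToken.Split('.');
        if (parts.Length != 3)
            throw new FormatException("expected header.payload.signature");

        // The first two segments are JSON; the third is the signature (may be empty for alg "none").
        Console.WriteLine(Encoding.UTF8.GetString(Convert.FromBase64String(PadBase64(parts[0]))));
        Console.WriteLine(Encoding.UTF8.GetString(Convert.FromBase64String(PadBase64(parts[1]))));
        Console.WriteLine(parts[2].Length == 0 ? "(unsigned)" : parts[2].Length + " encoded signature chars");
    }
}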
+ //[OnDeserialized] + private void OnDeserialized(/*StreamingContext context*/) + { + if (string.IsNullOrEmpty(this._encodedToken)) + throw new JsonWebTokenDeserializationException(); + + string[] fields = this._encodedToken.Split('.'); + + if (fields.Length != 3) + throw new JsonWebTokenDeserializationException(); + + this._header = JsonWebTokenUtilities.JsonDecode(fields[0]); + this._payload = JsonWebTokenUtilities.JsonDecode(fields[1]); + if(!string.IsNullOrEmpty(fields[2])) + { + this._signature = fields[2].FromBase64StringNoPadding(); + } + } + #endregion + + #region Nested Types + [JsonDictionary] + private abstract class JWTSectionBase : Dictionary + { + public override string ToString() + { + return JsonConvert.SerializeObject(this, JsonWebTokenUtilities.DefaultSerializerSettings); + } + + protected T TryGetValueOrDefault(string key) + { + object ret; + if(TryGetValue(key, out ret)) + { + //we have to special case DateTime + if (typeof(T) == typeof(DateTime)) + { + return (T)(object)ConvertDateTime(ret); + } + if (typeof(T).GetTypeInfo().IsEnum && ret is string) + { + return (T)Enum.Parse(typeof(T), (string)ret); + } + return (T)Convert.ChangeType(ret, typeof(T)); + } + + return default(T); + } + + protected System.DateTime ConvertDateTime(object obj) + { + if(obj is DateTime) + { + return (DateTime)obj; + } + else + { + //try to convert to a long, then + //convert from there, we expect it + //to be a Unix time + long longVal = Convert.ToInt64(obj); + + return longVal.FromUnixEpochTime(); + } + + } + } + + //these nested types comprise the header and the payload + //of the JWT, they are [DataContracts] so we can use JSON.NET + //to produce the JSON + + private class JWTHeader : JWTSectionBase + { + public JWTHeader() : base() + { + this.Type = "JWT"; + } + + internal string Type + { + get + { + return TryGetValueOrDefault(JsonWebTokenHeaderParameters.Type); + } + set + { + if (string.IsNullOrEmpty(value)) + { + this.Remove(JsonWebTokenHeaderParameters.Type); + } + else + { + this[JsonWebTokenHeaderParameters.Type] = value; + } + } + } + + internal JWTAlgorithm Algorithm + { + get + { + return TryGetValueOrDefault(JsonWebTokenHeaderParameters.Algorithm); + } + set + { + this[JsonWebTokenHeaderParameters.Algorithm] = value; + } + } + + internal string CertificateThumbprint + { + get + { + return TryGetValueOrDefault(JsonWebTokenHeaderParameters.X509CertificateThumbprint); + } + set + { + if(string.IsNullOrEmpty(value)) + { + this.Remove(JsonWebTokenHeaderParameters.X509CertificateThumbprint); + } + else + { + this[JsonWebTokenHeaderParameters.X509CertificateThumbprint] = value; + } + } + } + } + + private class JWTPayload : JWTSectionBase + { + public JWTPayload() { } + + internal JWTPayload(IEnumerable claims) + { + this.AddRange(JsonWebTokenUtilities.TranslateToJwtClaims(claims.AsEmptyIfNull())); + } + + internal string Audience + { + get + { + return TryGetValueOrDefault(JsonWebTokenClaims.Audience); + } + set + { + ArgumentUtility.CheckStringForNullOrEmpty(value, nameof(Audience)); + + this[JsonWebTokenClaims.Audience] = value; + } + } + + internal string Issuer + { + get + { + return TryGetValueOrDefault(JsonWebTokenClaims.Issuer); + } + set + { + ArgumentUtility.CheckStringForNullOrEmpty(value, nameof(Issuer)); + + this[JsonWebTokenClaims.Issuer] = value; + } + } + + internal string Subject + { + get + { + return TryGetValueOrDefault(JsonWebTokenClaims.Subject); + } + set + { + ArgumentUtility.CheckStringForNullOrEmpty(value, nameof(Subject)); + + 
this[JsonWebTokenClaims.Subject] = value; + } + } + + internal string NameIdentifier + { + get + { + return TryGetValueOrDefault(JsonWebTokenClaims.NameId); + } + set + { + if (string.IsNullOrEmpty(value)) + { + this.Remove(JsonWebTokenClaims.NameId); + } + else + { + this[JsonWebTokenClaims.NameId] = value; + } + } + } + + internal string IdentityProvider + { + get + { + return TryGetValueOrDefault(JsonWebTokenClaims.IdentityProvider); + } + set + { + if (string.IsNullOrEmpty(value)) + { + this.Remove(JsonWebTokenClaims.IdentityProvider); + } + else + { + this[JsonWebTokenClaims.IdentityProvider] = value; + } + } + } + + internal DateTime ValidTo + { + get + { + return TryGetValueOrDefault(JsonWebTokenClaims.ValidTo); + } + set + { + this[JsonWebTokenClaims.ValidTo] = value; + } + } + + internal DateTime ValidFrom + { + get + { + return TryGetValueOrDefault(JsonWebTokenClaims.ValidFrom); + } + set + { + this[JsonWebTokenClaims.ValidFrom] = value; + } + } + + internal DateTime IssuedAt + { + get + { + return TryGetValueOrDefault(JsonWebTokenClaims.IssuedAt); + } + set + { + if (value == default(DateTime)) + { + this.Remove(JsonWebTokenClaims.IssuedAt); + } + else + { + this[JsonWebTokenClaims.IssuedAt] = value; + } + } + } + + + internal bool TrustedForDelegation + { + get + { + return TryGetValueOrDefault(JsonWebTokenClaims.TrustedForDelegation); + } + set + { + this[JsonWebTokenClaims.TrustedForDelegation] = value; + } + } + + internal string ApplicationIdentifier + { + get + { + return TryGetValueOrDefault(JsonWebTokenClaims.AppId); + } + set + { + if (string.IsNullOrEmpty(value)) + { + this.Remove(JsonWebTokenClaims.AppId); + } + else + { + this[JsonWebTokenClaims.AppId] = value; + } + } + } + + internal JsonWebToken Actor + { + get + { + if (_actorToken == null && TryGetValueOrDefault(JsonWebTokenClaims.ActorToken) != null) + { + _actorToken = JsonConvert.DeserializeObject((string)this[JsonWebTokenClaims.ActorToken], JsonWebTokenUtilities.DefaultSerializerSettings); + } + return _actorToken; + } + set + { + if (value == null) + { + this.Remove(JsonWebTokenClaims.ActorToken); + } + else + { + this[JsonWebTokenClaims.ActorToken] = JsonConvert.SerializeObject(value); + } + } + } + + internal string ActorToken + { + get + { + return TryGetValueOrDefault(JsonWebTokenClaims.ActorToken); + } + set + { + this[JsonWebTokenClaims.ActorToken] = value; + } + } + + internal string Scopes + { + get + { + return TryGetValueOrDefault(JsonWebTokenClaims.Scopes); + } + set + { + this[JsonWebTokenClaims.Scopes] = value; + } + } + + private JsonWebToken _actorToken; + } + + //this coverter converts back and forth from the JWT encoded string + //and this full type + internal class JsonWebTokenConverter : VssSecureJsonConverter + { + public override bool CanConvert(Type objectType) + { + return (objectType == typeof(JsonWebToken)); + } + + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + if (reader.TokenType == JsonToken.Null) return null; + else if (reader.TokenType == JsonToken.String) + { + var ret = new JsonWebToken { EncodedToken = (string)reader.Value }; + ret.OnDeserialized(); + return ret; + } + else + throw new JsonWebTokenDeserializationException(); + } + + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + base.WriteJson(writer, value, serializer); + if (!(value is JsonWebToken)) throw new JsonWebTokenSerializationException(); + + 
writer.WriteValue(((JsonWebToken)value).EncodedToken); + } + } + #endregion + } +} diff --git a/src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenConstants.cs b/src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenConstants.cs new file mode 100644 index 00000000000..af08b3c2886 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenConstants.cs @@ -0,0 +1,36 @@ +using System.Security.Claims; + +namespace GitHub.Services.WebApi.Jwt +{ + public static class JsonWebTokenClaims + { + public const string ActorToken = "actort"; + public const string Audience = "aud"; + public const string IssuedAt = "iat"; + public const string Issuer = "iss"; + public const string NameId = "nameid"; + public const string IdentityProvider = "identityprovider"; + public const string ValidTo = "exp"; + public const string ValidFrom = "nbf"; + public const string Scopes = "scp"; + public const string RefreshToken = "ret"; + public const string Source = "src"; + public const string Subject = "sub"; + public const string TrustedForDelegation = "trustedfordelegation"; + public const string NameIdLongName = ClaimTypes.NameIdentifier; + public const string IdentityProviderLongName = "http://schemas.microsoft.com/accesscontrolservice/2010/07/claims/identityprovider"; + public const string TenantId = "tid"; + public const string TenantIdLongName = "http://schemas.microsoft.com/identity/claims/tenantid"; + public const string TokenId = "jti"; + public const string AppId = "appid"; + } + + internal static class JsonWebTokenHeaderParameters + { + internal const string Algorithm = "alg"; + internal const string Type = "typ"; + internal const string X509CertificateThumbprint = "x5t"; + internal const string JWTType = "JWT"; + internal const string JWTURNType = "urn:ietf:params:oauth:token-type:jwt"; + } +} diff --git a/src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenExceptions.cs b/src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenExceptions.cs new file mode 100644 index 00000000000..6e726795cf7 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenExceptions.cs @@ -0,0 +1,230 @@ +using System; +using System.Diagnostics.CodeAnalysis; +using GitHub.Services.Common; + +namespace GitHub.Services.WebApi.Jwt +{ + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "JsonWebTokenException", "GitHub.Services.WebApi.Jwt.JsonWebTokenException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class JsonWebTokenException : VssServiceException + { + public JsonWebTokenException(string message) + : base(message) + { + } + + public JsonWebTokenException(string message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "JsonWebTokenValidationException", "GitHub.Services.WebApi.Jwt.JsonWebTokenValidationException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class JsonWebTokenValidationException : JsonWebTokenException + { + public JsonWebTokenValidationException(string message) + : base(message) + { + } + + public JsonWebTokenValidationException(string message, Exception innerException) + : base(message, innerException) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "JsonWebTokenSerializationException", 
"GitHub.Services.WebApi.Jwt.JsonWebTokenSerializationException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class JsonWebTokenSerializationException : JsonWebTokenException + { + public JsonWebTokenSerializationException() : base(JwtResources.SerializationException()) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "JsonWebTokenDeserializationException", "GitHub.Services.WebApi.Jwt.JsonWebTokenDeserializationException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class JsonWebTokenDeserializationException : JsonWebTokenException + { + public JsonWebTokenDeserializationException() + : base(JwtResources.DeserializationException()) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "DigestUnsupportedException", "GitHub.Services.WebApi.Jwt.DigestUnsupportedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class DigestUnsupportedException : JsonWebTokenException + { + public DigestUnsupportedException(string supportedDigest, string invalidDigest) + : base(JwtResources.DigestUnsupportedException(supportedDigest, invalidDigest)) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidCredentialsException", "GitHub.Services.WebApi.Jwt.InvalidCredentialsException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidCredentialsException : JsonWebTokenException + { + public InvalidCredentialsException(string message) + : base(message) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "SignatureAlgorithmUnsupportedException", "GitHub.Services.WebApi.Jwt.SignatureAlgorithmUnsupportedException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class SignatureAlgorithmUnsupportedException : JsonWebTokenException + { + public SignatureAlgorithmUnsupportedException(string invalidAlgorithm) + : base(JwtResources.SignatureAlgorithmUnsupportedException(invalidAlgorithm)) + { + } + + public SignatureAlgorithmUnsupportedException(int providerType) + : base(JwtResources.ProviderTypeUnsupported(providerType)) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidClockSkewException", "GitHub.Services.WebApi.Jwt.InvalidClockSkewException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidClockSkewException : JsonWebTokenException + { + public InvalidClockSkewException() + : base(JwtResources.InvalidClockSkewException()) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidValidFromValueException", "GitHub.Services.WebApi.Jwt.InvalidValidFromValueException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidValidFromValueException : JsonWebTokenException + { + public 
InvalidValidFromValueException() + : base(JwtResources.InvalidValidFromValueException()) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidValidToValueException", "GitHub.Services.WebApi.Jwt.InvalidValidToValueException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidValidToValueException : JsonWebTokenException + { + public InvalidValidToValueException() + : base(JwtResources.InvalidValidToValueException()) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ValidFromAfterValidToException", "GitHub.Services.WebApi.Jwt.ValidFromAfterValidToException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ValidFromAfterValidToException : JsonWebTokenException + { + public ValidFromAfterValidToException() + : base(JwtResources.ValidFromAfterValidToException()) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "ActorValidationException", "GitHub.Services.WebApi.Jwt.ActorValidationException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ActorValidationException : JsonWebTokenValidationException + { + public ActorValidationException() + : base(JwtResources.ActorValidationException()) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "TokenNotYetValidException", "GitHub.Services.WebApi.Jwt.TokenNotYetValidException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class TokenNotYetValidException : JsonWebTokenValidationException + { + public TokenNotYetValidException() + : base(JwtResources.TokenNotYetValidException()) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "TokenExpiredException", "GitHub.Services.WebApi.Jwt.TokenExpiredException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class TokenExpiredException : JsonWebTokenValidationException + { + public TokenExpiredException() + : base(JwtResources.TokenExpiredException()) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidAudienceException", "GitHub.Services.WebApi.Jwt.InvalidAudienceException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidAudienceException : JsonWebTokenValidationException + { + public InvalidAudienceException() + : base(JwtResources.InvalidAudienceException()) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidTokenException", "GitHub.Services.WebApi.Jwt.InvalidTokenException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidTokenException : JsonWebTokenValidationException + { + public InvalidTokenException(string message) + : base(message) + { + } + } + + 
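These exception types are what callers of the validation helper (JsonWebTokenUtilities.ValidateToken, added later in this patch) are expected to observe. A hedged sketch of that call pattern follows; the audience and issuer strings are placeholders, the imported namespaces are assumptions about where the referenced types live, and TokenExpiredException is caught separately only to show that the subclasses can be distinguished.

using System.Security.Claims;
using GitHub.Services.Common;
using GitHub.Services.WebApi.Jwt;

static class TokenValidationSketch
{
    public static ClaimsPrincipal TryValidate(JsonWebToken token, VssSigningCredentials signingKey)
    {
        var parameters = new JsonWebTokenValidationParameters
        {
            AllowedAudiences = new[] { "urn:example:audience" },  // placeholder value
            ValidIssuers = new[] { "urn:example:issuer" },        // placeholder value
            SigningCredentials = signingKey
        };

        try
        {
            return token.ValidateToken(parameters);
        }
        catch (TokenExpiredException)
        {
            // ValidTo (exp) is in the past beyond the configured clock skew.
            return null;
        }
        catch (JsonWebTokenValidationException)
        {
            // Any other failure: audience, issuer, signature, not-yet-valid, actor, ...
            return null;
        }
    }
}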
[Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "SignatureValidationException", "GitHub.Services.WebApi.Jwt.SignatureValidationException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class SignatureValidationException : JsonWebTokenValidationException + { + public SignatureValidationException() + : base(JwtResources.SignatureValidationException()) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidIssuerException", "GitHub.Services.WebApi.Jwt.InvalidIssuerException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidIssuerException : JsonWebTokenValidationException + { + public InvalidIssuerException() + : base(JwtResources.InvalidIssuerException()) + { + } + } + + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "InvalidScopeException", "GitHub.Services.WebApi.Jwt.InvalidScopeException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class InvalidScopeException : JsonWebTokenValidationException + { + public InvalidScopeException() + : base(JwtResources.TokenScopeNotAuthorizedException()) + { + + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenUtilities.cs b/src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenUtilities.cs new file mode 100644 index 00000000000..d296666b75c --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenUtilities.cs @@ -0,0 +1,321 @@ +using System; +using System.Collections.Generic; +using System.Security.Claims; +using System.Text; +using GitHub.Services.Common; +using Newtonsoft.Json; +using Newtonsoft.Json.Converters; + +namespace GitHub.Services.WebApi.Jwt +{ + public static class JsonWebTokenUtilities + { + static JsonWebTokenUtilities() + { + DefaultSerializerSettings = new JsonSerializerSettings(); + DefaultSerializerSettings.Converters.Add(new UnixEpochDateTimeConverter()); + DefaultSerializerSettings.Converters.Add(new StringEnumConverter()); + } + + internal static readonly JsonSerializerSettings DefaultSerializerSettings; + + internal static readonly short MinKeySize = 2048; + + internal static string JsonEncode(this T obj) + { + return JsonEncode((object)obj); + } + + internal static string JsonEncode(object o) + { + ArgumentUtility.CheckForNull(o, nameof(o)); + + string json = JsonConvert.SerializeObject(o, DefaultSerializerSettings); + + return Encoding.UTF8.GetBytes(json).ToBase64StringNoPadding(); + } + + internal static T JsonDecode(string encodedString) + { + ArgumentUtility.CheckStringForNullOrEmpty(encodedString, nameof(encodedString)); + + byte[] bytes = encodedString.FromBase64StringNoPadding(); + + string json = Encoding.UTF8.GetString(bytes); + + return JsonConvert.DeserializeObject(json, DefaultSerializerSettings); + } + + internal static IDictionary TranslateToJwtClaims(IEnumerable claims) + { + ArgumentUtility.CheckForNull(claims, nameof(claims)); + + Dictionary ret = new Dictionary(); + + //there are two claim names that get special treatment + foreach (Claim claim in claims) + { + string claimName = claim.Type; + if (string.Compare(claimName, JsonWebTokenClaims.NameIdLongName, StringComparison.Ordinal) == 0) + { + claimName = JsonWebTokenClaims.NameId; + } + else if 
(string.Compare(claimName, JsonWebTokenClaims.IdentityProviderLongName, StringComparison.Ordinal) == 0) + { + claimName = JsonWebTokenClaims.IdentityProvider; + } + + ret.Add(claimName, claim.Value); + } + + return ret; + } + + internal static IEnumerable TranslateFromJwtClaims(IDictionary claims) + { + ArgumentUtility.CheckForNull(claims, nameof(claims)); + + List ret = new List(); + + //there are two claim names that get special treatment + foreach (var claim in claims) + { + string claimName = claim.Key; + if (string.Compare(claimName, JsonWebTokenClaims.NameId, StringComparison.Ordinal) == 0) + { + claimName = JsonWebTokenClaims.NameIdLongName; + } + else if (string.Compare(claimName, JsonWebTokenClaims.IdentityProvider, StringComparison.Ordinal) == 0) + { + claimName = JsonWebTokenClaims.IdentityProviderLongName; + } + + ret.Add(new Claim(claimName, claim.Value.ToString())); + } + + return ret; + } + + internal static IEnumerable ExtractClaims(this JsonWebToken token) + { + ArgumentUtility.CheckForNull(token, nameof(token)); + + return TranslateFromJwtClaims(token.Payload); + } + + public static bool IsExpired(this JsonWebToken token) + { + ArgumentUtility.CheckForNull(token, nameof(token)); + + return DateTime.UtcNow > token.ValidTo; + } + + internal static JWTAlgorithm ValidateSigningCredentials(VssSigningCredentials credentials, bool allowExpiredToken = false) + { + if (credentials == null) + { + return JWTAlgorithm.None; + } + + if (!credentials.CanSignData) + { + throw new InvalidCredentialsException(JwtResources.SigningTokenNoPrivateKey()); + } + + if (!allowExpiredToken && credentials.ValidTo.ToUniversalTime() < (DateTime.UtcNow - TimeSpan.FromMinutes(5))) + { + throw new InvalidCredentialsException(JwtResources.SigningTokenExpired()); + } + + return credentials.SignatureAlgorithm; + } + + public static ClaimsPrincipal ValidateToken(this JsonWebToken token, JsonWebTokenValidationParameters parameters) + { + ArgumentUtility.CheckForNull(token, nameof(token)); + ArgumentUtility.CheckForNull(parameters, nameof(parameters)); + + ClaimsIdentity actorIdentity = ValidateActor(token, parameters); + ValidateLifetime(token, parameters); + ValidateAudience(token, parameters); + ValidateSignature(token, parameters); + ValidateIssuer(token, parameters); + + ClaimsIdentity identity = new ClaimsIdentity("Federation", parameters.IdentityNameClaimType, ClaimTypes.Role); + + if (actorIdentity != null) + { + identity.Actor = actorIdentity; + } + + IEnumerable claims = token.ExtractClaims(); + + foreach (Claim claim in claims) + { + identity.AddClaim(new Claim(claim.Type, claim.Value, claim.ValueType, token.Issuer)); + } + + return new ClaimsPrincipal(identity); + } + + private static ClaimsIdentity ValidateActor(JsonWebToken token, JsonWebTokenValidationParameters parameters) + { + ArgumentUtility.CheckForNull(token, nameof(token)); + ArgumentUtility.CheckForNull(parameters, nameof(parameters)); + + if (!parameters.ValidateActor) + { + return null; + } + + //this recursive call with check the parameters + ClaimsPrincipal principal = token.Actor.ValidateToken(parameters.ActorValidationParameters); + + if (!(principal?.Identity is ClaimsIdentity)) + { + throw new ActorValidationException(); + } + + return (ClaimsIdentity)principal.Identity; + } + + private static void ValidateLifetime(JsonWebToken token, JsonWebTokenValidationParameters parameters) + { + ArgumentUtility.CheckForNull(token, nameof(token)); + ArgumentUtility.CheckForNull(parameters, nameof(parameters)); + + if 
((parameters.ValidateNotBefore || parameters.ValidateExpiration) && (parameters.ClockSkewInSeconds < 0)) + { + throw new InvalidClockSkewException(); + } + + TimeSpan skew = TimeSpan.FromSeconds(parameters.ClockSkewInSeconds); + + if (parameters.ValidateNotBefore && token.ValidFrom == default(DateTime)) + { + throw new InvalidValidFromValueException(); + } + + if (parameters.ValidateExpiration && token.ValidTo == default(DateTime)) + { + throw new InvalidValidToValueException(); + } + + if (parameters.ValidateExpiration && parameters.ValidateNotBefore && (token.ValidFrom > token.ValidTo)) + { + throw new ValidFromAfterValidToException(); + } + + if (parameters.ValidateNotBefore && (token.ValidFrom > (DateTime.UtcNow + skew))) + { + throw new TokenNotYetValidException(); //validation exception + } + + if (parameters.ValidateExpiration && (token.ValidTo < (DateTime.UtcNow - skew))) + { + throw new TokenExpiredException(); //validation exception + } + } + + private static void ValidateAudience(JsonWebToken token, JsonWebTokenValidationParameters parameters) + { + ArgumentUtility.CheckForNull(token, nameof(token)); + ArgumentUtility.CheckForNull(parameters, nameof(parameters)); + + if (!parameters.ValidateAudience) + { + return; + } + + ArgumentUtility.CheckStringForNullOrEmpty(token.Audience, nameof(token.Audience)); + ArgumentUtility.CheckEnumerableForNullOrEmpty(parameters.AllowedAudiences, nameof(parameters.AllowedAudiences)); + + foreach (string audience in parameters.AllowedAudiences) + { + if (string.Compare(audience, token.Audience, StringComparison.OrdinalIgnoreCase) == 0) + { + return; + } + } + + throw new InvalidAudienceException(); //validation exception; + } + + private static void ValidateSignature(JsonWebToken token, JsonWebTokenValidationParameters parameters) + { + ArgumentUtility.CheckForNull(token, nameof(token)); + ArgumentUtility.CheckForNull(parameters, nameof(parameters)); + + if (!parameters.ValidateSignature) + { + return; + } + + string encodedData = token.EncodedToken; + + string[] parts = encodedData.Split('.'); + + if (parts.Length != 3) + { + throw new InvalidTokenException(JwtResources.EncodedTokenDataMalformed()); //validation exception + } + + if (string.IsNullOrEmpty(parts[2])) + { + throw new InvalidTokenException(JwtResources.SignatureNotFound()); //validation exception + } + + if (token.Algorithm == JWTAlgorithm.None) + { + throw new InvalidTokenException(JwtResources.InvalidSignatureAlgorithm()); //validation exception + } + + ArgumentUtility.CheckForNull(parameters.SigningCredentials, nameof(parameters.SigningCredentials)); + + //ArgumentUtility.CheckEnumerableForNullOrEmpty(parameters.SigningToken.SecurityKeys, nameof(parameters.SigningToken.SecurityKeys)); + + byte[] sourceInput = Encoding.UTF8.GetBytes(string.Format("{0}.{1}", parts[0], parts[1])); + + byte[] sourceSignature = parts[2].FromBase64StringNoPadding(); + + + try + { + if (parameters.SigningCredentials.VerifySignature(sourceInput, sourceSignature)) + { + return; + } + } + catch (Exception) + { + //swallow exceptions here, we'll throw if nothing works... 
+ } + + throw new SignatureValidationException(); //valiation exception + } + + private static void ValidateIssuer(JsonWebToken token, JsonWebTokenValidationParameters parameters) + { + ArgumentUtility.CheckForNull(token, nameof(token)); + ArgumentUtility.CheckForNull(parameters, nameof(parameters)); + + if (!parameters.ValidateIssuer) + { + return; + } + + ArgumentUtility.CheckStringForNullOrEmpty(token.Issuer, nameof(token.Issuer)); + ArgumentUtility.CheckEnumerableForNullOrEmpty(parameters.ValidIssuers, nameof(parameters.ValidIssuers)); + + foreach (string issuer in parameters.ValidIssuers) + { + if (string.Compare(issuer, token.Issuer, StringComparison.OrdinalIgnoreCase) == 0) + { + return; + } + } + + throw new InvalidIssuerException(); //validation exception; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenValidationParameters.cs b/src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenValidationParameters.cs new file mode 100644 index 00000000000..9e039fe2358 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Jwt/JsonWebTokenValidationParameters.cs @@ -0,0 +1,92 @@ +using System.Collections.Generic; +using System.Security.Claims; + +namespace GitHub.Services.WebApi.Jwt +{ + public sealed class JsonWebTokenValidationParameters + { + public JsonWebTokenValidationParameters() + { + ValidateActor = false; + ValidateAudience = true; + ValidateIssuer = true; + ValidateExpiration = true; + ValidateNotBefore = false; + ValidateSignature = true; + ClockSkewInSeconds = 300; + IdentityNameClaimType = ClaimTypes.NameIdentifier; + } + + public bool ValidateActor + { + get; + set; + } + + public bool ValidateAudience + { + get; + set; + } + + public bool ValidateIssuer + { + get; + set; + } + + public bool ValidateExpiration + { + get; + set; + } + + public bool ValidateNotBefore + { + get; + set; + } + + public bool ValidateSignature + { + get; + set; + } + + public JsonWebTokenValidationParameters ActorValidationParameters + { + get; + set; + } + + public IEnumerable AllowedAudiences + { + get; + set; + } + + public int ClockSkewInSeconds + { + get; + set; + } + + public VssSigningCredentials SigningCredentials + { + get; + set; + } + + public IEnumerable ValidIssuers + { + get; + set; + } + + public string IdentityNameClaimType + { + get; + set; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Jwt/UnixEpochDateTimeConverter.cs b/src/Sdk/WebApi/WebApi/Jwt/UnixEpochDateTimeConverter.cs new file mode 100644 index 00000000000..a2cbf66d0f7 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Jwt/UnixEpochDateTimeConverter.cs @@ -0,0 +1,24 @@ +using System; +using GitHub.Services.Common; +using Newtonsoft.Json; +using Newtonsoft.Json.Converters; + +namespace GitHub.Services.WebApi.Jwt +{ + class UnixEpochDateTimeConverter : DateTimeConverterBase + { + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + long unixVal = reader.Value is string ? 
long.Parse((string) reader.Value) : (long)reader.Value; + + return unixVal.FromUnixEpochTime(); + } + + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + long unixVal = ((DateTime)value).ToUnixEpochTime(); + + writer.WriteValue(unixVal); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Location/Interfaces.cs b/src/Sdk/WebApi/WebApi/Location/Interfaces.cs new file mode 100644 index 00000000000..487d0918925 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Location/Interfaces.cs @@ -0,0 +1,592 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Location; + +namespace GitHub.Services.WebApi.Location +{ + /// + /// The service responsible for providing a connection to a Team + /// Foundation Server as well as the locations of other services that + /// are available on it. + /// + [VssClientServiceImplementation(typeof(LocationService))] + public interface ILocationService : IVssClientService + { + /// + /// Gets the provider of location data specified by the given location area guid. + /// The provider could be local or remote depending on where the area data is hosted + /// in the location hierarchy in relation to this service instance. Returns null if + /// the area could not be found + /// + /// + /// + ILocationDataProvider GetLocationData(Guid locationAreaIdentifier); + + /// + /// Gets the URL of the location service for the given location area guid and access mapping moniker. + /// If the area could not be found this method will return null. This is useful for getting the + /// base URL of service hosts, or of other service instances or resource areas. + /// + /// To find a specific service definition contained in the given location area and to formulate + /// the proper URL for a specific resource in that location area, you would need to + /// retrieve the location data for that area. This operation is simplified by calling GetLocationData + /// + /// + /// + String GetLocationServiceUrl(Guid locationAreaIdentifier); + + /// + /// Gets the URL of the location service for the given location area guid and access mapping moniker. + /// If the area could not be found this method will return null. This is useful for getting the + /// base URL of service hosts, or of other service instances or resource areas. + /// + /// To find a specific service definition contained in the given location area and to formulate + /// the proper URL for a specific resource in that location area, you would need to + /// retrieve the location data for that area. This operation is simplified by calling GetLocationData + /// + /// + /// + /// + String GetLocationServiceUrl(Guid locationAreaIdentifier, String accessMappingMoniker); + + #region Async APIs + + /// + /// Gets the provider of location data specified by the given location area guid. + /// The provider could be local or remote depending on where the area data is hosted + /// in the location hierarchy in relation to this service instance. Returns null if + /// the area could not be found + /// + /// + /// + /// + Task GetLocationDataAsync( + Guid locationAreaIdentifier, + CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// Gets the URL of the location service for the given location area guid and access mapping moniker. + /// If the area could not be found this method will return null. This is useful for getting the + /// base URL of service hosts, or of other service instances or resource areas. 
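// Illustrative aside (not part of this patch): UnixEpochDateTimeConverter above delegates to the
// SDK's FromUnixEpochTime/ToUnixEpochTime extensions. A minimal sketch of the same seconds-since-1970
// round trip using only BCL types (assumes using System); the class and method names are hypothetical.
public static class UnixEpochSketch
{
    public static DateTime FromUnixSeconds(long seconds)
    {
        // DateTimeOffset handles the 1970-01-01T00:00:00Z origin.
        return DateTimeOffset.FromUnixTimeSeconds(seconds).UtcDateTime;
    }

    public static long ToUnixSeconds(DateTime value)
    {
        return new DateTimeOffset(value.ToUniversalTime()).ToUnixTimeSeconds();
    }
}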
+ /// + /// To find a specific service definition contained in the given location area and to formulate + /// the proper URL for a specific resource in that location area, you would need to + /// retrieve the location data for that area. This operation is simplified by calling GetLocationData + /// + /// + /// + /// + /// + Task GetLocationServiceUrlAsync( + Guid locationAreaIdentifier, + String accessMappingMoniker = null, + CancellationToken cancellationToken = default(CancellationToken)); + + #endregion + } + + /// + /// The service responsible for providing a connection to a Team + /// Foundation Server as well as the locations of other services that + /// are available on it. + /// + public interface ILocationDataProvider + { + /// + /// The unique identifier for this server. + /// + Guid InstanceId { get; } + + + /// + /// The identifier of the type of server instance. + /// + Guid InstanceType { get; } + + /// + /// The AccessMapping for the current connection to the server. Note, it is + /// possible that the current ClientAccessMapping is not a member of the + /// ConfiguredAccessMappings if the access point this client used to connect to + /// the server has not been configured on it. This will never be null. + /// + AccessMapping ClientAccessMapping { get; } + + /// + /// The default AccessMapping for this location service. This will never be null. + /// + AccessMapping DefaultAccessMapping { get; } + + /// + /// All of the AccessMappings that this location service knows about. Because a + /// given location service can inherit AccessMappings from its parent these + /// AccessMappings may exist on this location service or its parent. + /// + IEnumerable ConfiguredAccessMappings { get; } + + // + // Saves the provided ServiceDefinition within the location service. This + // operation will assign the Identifier property on the ServiceDefinition object + // if one is not already assigned. Any AccessMappings referenced in the + // LocationMappings property must already be configured with the location + // service. + // + // + // The ServiceDefinition to save. This object will be updated with a new + // Identifier if one is not already assigned. + // + //void SaveServiceDefinition( + // ServiceDefinition serviceDefinition); + + // + // Saves the provided ServiceDefinitions within the location service. This + // operation will assign the Identifier property on the ServiceDefinition + // objects if one is not already assigned. Any AccessMappings referenced in + // the LocationMappings property must already be configured with the location + // service. + // + // + // The ServiceDefinitions to save. These objects will be updated with a new + // Identifier if one is not already assigned. + // + //void SaveServiceDefinitions( + // IEnumerable serviceDefinitions); + + // + // Removes the ServiceDefinition with the specified service type and + // service identifier from the location serivce. + // + // + // The service type of the ServiceDefinition to remove. + // + // + // The service identifier of the ServiceDefinition to remove. + // + //void RemoveServiceDefinition( + // String serviceType, + // Guid serviceIdentifier); + + // + // Removes the specified ServiceDefinition from the location service. + // + // + // The ServiceDefinition to remove. This must be a ServiceDefinition that is + // already registered in the location service. + // Equality is decided by matching the service type and the identifier. 
+ // + //void RemoveServiceDefinition( + // ServiceDefinition serviceDefinition); + + // + // Removes the specified ServiceDefinitions from the location service. + // + // + // The ServiceDefinitions to remove. These must be ServiceDefinitions that are + // already registered in the location service. + // Equality is decided by matching the service type and the identifier. + // + //void RemoveServiceDefinitions( + // IEnumerable serviceDefinitions); + + /// + /// Finds the ServiceDefinition with the specified service type and service + /// identifier. If no matching ServiceDefinition exists, null is returned. + /// + /// + /// The service type of the ServiceDefinition to find. + /// + /// + /// The service identifier of the ServiceDefinition + /// to find. + /// + /// + /// The ServiceDefinition with the specified service type and service identifier. + /// If no matching ServiceDefinition exists, null is returned. + /// + ServiceDefinition FindServiceDefinition( + String serviceType, + Guid serviceIdentifier); + + /// + /// Finds the ServiceDefinitions for all of the services with the + /// specified service type. If no ServiceDefinitions of this type + /// exist, an empty enumeration will be returned. + /// + /// + /// The case-insensitive string that identifies what type of service is being + /// requested. If this value is null, ServiceDefinitions for all services + /// registered with this location service will be returned. + /// + /// + /// ServiceDefinitions for all of the services with the specified service type. + /// If no ServiceDefinitions of this type exist, an empty enumeration will be + /// returned. + /// + IEnumerable FindServiceDefinitions( + String serviceType); + + /// + /// Returns the location for the ServiceDefintion associated with the ServiceType + /// and ServiceIdentifier that should be used based on the current connection. + /// If a ServiceDefinition with the ServiceType and ServiceIdentifier does not + /// exist then null will be returned. If a ServiceDefinition with the ServiceType + /// and ServiceIdentifier is found then a location will be returned if the + /// ServiceDefinition is well formed (otherwise an exception will be thrown). + /// + /// When determining what location to return for the ServiceDefinition and + /// current connection the following rules will be applied: + /// + /// 1. Try to find a location for the ClientAccessMapping. + /// 2. Try to find a location for the DefaultAccessMapping. + /// 3. Use the first location in the LocationMappings list. + /// + /// + /// The service type of the ServiceDefinition to find the location for. + /// + /// + /// The service identifier of the ServiceDefinition to find the location for. + /// + /// + /// The location for the ServiceDefinition with the provided service type and + /// identifier that should be used based on the current connection. + /// + String LocationForCurrentConnection( + String serviceType, + Guid serviceIdentifier); + + /// + /// Returns the location for the ServiceDefintion that should be used based on + /// the current connection. This method will never return null or empty. If it + /// succeeds it will return a targetable location for the provided + /// ServiceDefinition. + /// + /// When determining what location to return for the ServiceDefinition and + /// current connection the following rules will be applied: + /// + /// 1. Try to find a location for the ClientAccessMapping. + /// 2. Try to find a location for the DefaultAccessMapping. + /// 3. 
Use the first location in the LocationMappings list. + /// + /// + /// The ServiceDefinition to find the location for. + /// + /// + /// The location for the given ServiceDefinition that should be + /// used based on the current connection. + /// + String LocationForCurrentConnection( + ServiceDefinition serviceDefinition); + + /// + /// Returns the location for the ServiceDefinition that has the specified + /// service type and service identifier for the provided + /// AccessMapping. If this ServiceDefinition is FullyQualified and no + /// LocationMapping exists for this AccessMapping then null will be returned. + /// + /// + /// The service type of the ServiceDefinition to find the location for. + /// + /// + /// The service identifier of the ServiceDefinition to find the location for. + /// + /// The AccessMapping to find the location for. + /// + /// The location for the ServiceDefinition for the provided + /// AccessMapping. If this ServiceDefinition is FullyQualified and no + /// LocationMapping exists for this AccessMapping then null will be returned. + /// + String LocationForAccessMapping( + String serviceType, + Guid serviceIdentifier, + AccessMapping accessMapping); + + /// + /// Returns the location for the ServiceDefinition for the provided + /// AccessMapping. If this ServiceDefinition is FullyQualified and no + /// LocationMapping exists for this AccessMapping then null will be returned. + /// + /// + /// The ServiceDefinition to find the location for. + /// + /// The AccessMapping to find the location for. + /// + /// The location for the ServiceDefinition for the provided + /// AccessMapping. If this ServiceDefinition is FullyQualified and no + /// LocationMapping exists for this AccessMapping then null will be returned. + /// + String LocationForAccessMapping( + ServiceDefinition serviceDefinition, + AccessMapping accessMapping); + + // + // Configures the AccessMapping with the provided moniker to have the provided + // display name and access point. This function also allows for this + // AccessMapping to be made the default AccessMapping. + // + // + // A string that uniquely identifies this AccessMapping. This value cannot be + // null or empty. + // + // + // Display name for this AccessMapping. This value cannot be null or empty. + // + // + // This is the base url for the server that will map to this AccessMapping. + // This value cannot be null or empty. + // + // The access point should consist of the scheme, authority, port and web + // application virtual path of the targetable server address. For example, an + // access point will most commonly look like this: + // + // http://server:8080/tfs/ + // + // + // If true, this AccessMapping will be made the default AccessMapping. If false, + // the default AccessMapping will not change. + // + // The AccessMapping object that was just configured. + //AccessMapping ConfigureAccessMapping( + // String moniker, + // String displayName, + // String accessPoint, + // Boolean makeDefault); + + // + // Sets the default AccessMapping to the AccessMapping passed in. + // + // + // The AccessMapping that should become the default AccessMapping. This + // AccessMapping must already be configured with this location service. + // + //void SetDefaultAccessMapping( + // AccessMapping accessMapping); + + /// + /// Gets the AccessMapping with the specified moniker. Returns null + /// if an AccessMapping with the supplied moniker does not exist. + /// + /// + /// The moniker for the desired AccessMapping. 
This value cannot be null or + /// empty. + /// + /// + /// The AccessMapping with the supplied moniker or null if one does not exist. + /// + AccessMapping GetAccessMapping( + String moniker); + + // + // Removes an AccessMapping and all of the locations that are mapped + // to it within ServiceDefinitions. + // + // The moniker for the AccessMapping to remove. + //void RemoveAccessMapping( + // String moniker); + + /// + /// Get the API resource locations -- a collection of versioned URL paths that + /// are keyed by a location identitifer + /// + /// + ApiResourceLocationCollection GetResourceLocations(); + + #region Async APIs + + /// + /// The unique identifier for this server. + /// + Task GetInstanceIdAsync(CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// The identifier of the type of server instance. + /// + Task GetInstanceTypeAsync(CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// The AccessMapping for the current connection to the server. Note, it is + /// possible that the current ClientAccessMapping is not a member of the + /// ConfiguredAccessMappings if the access point this client used to connect to + /// the server has not been configured on it. This will never be null. + /// + /// + /// + Task GetClientAccessMappingAsync(CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// The default AccessMapping for this location service. This will never be null. + /// + /// + /// + Task GetDefaultAccessMappingAsync(CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// All of the AccessMappings that this location service knows about. Because a + /// given location service can inherit AccessMappings from its parent these + /// AccessMappings may exist on this location service or its parent. + /// + /// + /// + Task> GetConfiguredAccessMappingsAsync(CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// Finds the ServiceDefinition with the specified service type and service + /// identifier. If no matching ServiceDefinition exists, null is returned. + /// + /// + /// The service type of the ServiceDefinition to find. + /// + /// + /// The service identifier of the ServiceDefinition + /// to find. + /// + /// + /// The ServiceDefinition with the specified service type and service identifier. + /// If no matching ServiceDefinition exists, null is returned. + /// + Task FindServiceDefinitionAsync( + String serviceType, + Guid serviceIdentifier, + CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// Finds the ServiceDefinitions for all of the services with the + /// specified service type. If no ServiceDefinitions of this type + /// exist, an empty enumeration will be returned. + /// + /// + /// The case-insensitive string that identifies what type of service is being + /// requested. If this value is null, ServiceDefinitions for all services + /// registered with this location service will be returned. + /// + /// + /// ServiceDefinitions for all of the services with the specified service type. + /// If no ServiceDefinitions of this type exist, an empty enumeration will be + /// returned. + /// + Task> FindServiceDefinitionsAsync( + String serviceType, + CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// Returns the location for the ServiceDefintion associated with the ServiceType + /// and ServiceIdentifier that should be used based on the current connection. 
+ /// If a ServiceDefinition with the ServiceType and ServiceIdentifier does not + /// exist then null will be returned. If a ServiceDefinition with the ServiceType + /// and ServiceIdentifier is found then a location will be returned if the + /// ServiceDefinition is well formed (otherwise an exception will be thrown). + /// + /// When determining what location to return for the ServiceDefinition and + /// current connection the following rules will be applied: + /// + /// 1. Try to find a location for the ClientAccessMapping. + /// 2. Try to find a location for the DefaultAccessMapping. + /// 3. Use the first location in the LocationMappings list. + /// + /// + /// The service type of the ServiceDefinition to find the location for. + /// + /// + /// The service identifier of the ServiceDefinition to find the location for. + /// + /// + /// The location for the ServiceDefinition with the provided service type and + /// identifier that should be used based on the current connection. + /// + Task LocationForCurrentConnectionAsync( + String serviceType, + Guid serviceIdentifier, + CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// Returns the location for the ServiceDefintion that should be used based on + /// the current connection. This method will never return null or empty. If it + /// succeeds it will return a targetable location for the provided + /// ServiceDefinition. + /// + /// When determining what location to return for the ServiceDefinition and + /// current connection the following rules will be applied: + /// + /// 1. Try to find a location for the ClientAccessMapping. + /// 2. Try to find a location for the DefaultAccessMapping. + /// 3. Use the first location in the LocationMappings list. + /// + /// + /// The ServiceDefinition to find the location for. + /// + /// + /// The location for the given ServiceDefinition that should be + /// used based on the current connection. + /// + Task LocationForCurrentConnectionAsync( + ServiceDefinition serviceDefinition, + CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// Returns the location for the ServiceDefinition that has the specified + /// service type and service identifier for the provided + /// AccessMapping. If this ServiceDefinition is FullyQualified and no + /// LocationMapping exists for this AccessMapping then null will be returned. + /// + /// + /// The service type of the ServiceDefinition to find the location for. + /// + /// + /// The service identifier of the ServiceDefinition to find the location for. + /// + /// The AccessMapping to find the location for. + /// + /// The location for the ServiceDefinition for the provided + /// AccessMapping. If this ServiceDefinition is FullyQualified and no + /// LocationMapping exists for this AccessMapping then null will be returned. + /// + Task LocationForAccessMappingAsync( + String serviceType, + Guid serviceIdentifier, + AccessMapping accessMapping, + CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// Returns the location for the ServiceDefinition for the provided + /// AccessMapping. If this ServiceDefinition is FullyQualified and no + /// LocationMapping exists for this AccessMapping then null will be returned. + /// + /// + /// The ServiceDefinition to find the location for. + /// + /// The AccessMapping to find the location for. + /// + /// The location for the ServiceDefinition for the provided + /// AccessMapping. 
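// Illustrative aside (not part of this patch): the resolution order documented above for
// LocationForCurrentConnection (1. client access mapping, 2. default access mapping, 3. first
// location mapping) can be sketched over plain moniker/location pairs. Assumes using System and
// System.Collections.Generic; ResolveLocation and its parameter names are hypothetical.
public static class LocationResolutionSketch
{
    public static string ResolveLocation(
        IReadOnlyList<KeyValuePair<string, string>> locationMappings, // moniker -> location
        string clientMoniker,
        string defaultMoniker)
    {
        // Rule 1: prefer the mapping for the client's access mapping.
        foreach (var mapping in locationMappings)
        {
            if (StringComparer.OrdinalIgnoreCase.Equals(mapping.Key, clientMoniker))
            {
                return mapping.Value;
            }
        }

        // Rule 2: fall back to the default access mapping.
        foreach (var mapping in locationMappings)
        {
            if (StringComparer.OrdinalIgnoreCase.Equals(mapping.Key, defaultMoniker))
            {
                return mapping.Value;
            }
        }

        // Rule 3: otherwise use the first configured location, if any.
        return locationMappings.Count > 0 ? locationMappings[0].Value : null;
    }
}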
If this ServiceDefinition is FullyQualified and no + /// LocationMapping exists for this AccessMapping then null will be returned. + /// + Task LocationForAccessMappingAsync( + ServiceDefinition serviceDefinition, + AccessMapping accessMapping, + CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// Gets the AccessMapping with the specified moniker. Returns null + /// if an AccessMapping with the supplied moniker does not exist. + /// + /// + /// The moniker for the desired AccessMapping. This value cannot be null or + /// empty. + /// + /// + /// The AccessMapping with the supplied moniker or null if one does not exist. + /// + Task GetAccessMappingAsync( + String moniker, + CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// Get the API resource locations -- a collection of versioned URL paths that + /// are keyed by a location identitifer + /// + /// + Task GetResourceLocationsAsync(CancellationToken cancellationToken = default(CancellationToken)); + + #endregion + } +} diff --git a/src/Sdk/WebApi/WebApi/Location/LocationCacheManager.cs b/src/Sdk/WebApi/WebApi/Location/LocationCacheManager.cs new file mode 100644 index 00000000000..aad8fc5509f --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Location/LocationCacheManager.cs @@ -0,0 +1,941 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.IO; +using System.Threading; +using System.Xml; +using GitHub.Services.Common; +using GitHub.Services.Location; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.WebApi.Location +{ + /// + /// This class is responsible for managing both the in-memory and disk cache + /// for the location service. + /// + internal class LocationCacheManager + { + /// + /// Creates a new cache manager for the serverGuid passed in. + /// + /// + public LocationCacheManager(Guid serverGuid, Guid serviceOwner, Uri connectionBaseUrl) + { + m_cacheAvailable = (serverGuid.Equals(Guid.Empty)) ? false : true; + + m_lastChangeId = -1; + m_cacheExpirationDate = DateTime.MinValue; + +#if !NETSTANDARD + ClientCacheTimeToLive = VssClientSettings.ClientCacheTimeToLive; +#endif + if (serviceOwner == Guid.Empty) + { + // For a legacy server (which didn't return serviceOwner in the connectionData), let's not try to break anything + // just use the old path. + // We should be able to remove this case eventually. + m_cacheFilePath = Path.Combine(Path.Combine(VssClientSettings.ClientCacheDirectory, serverGuid.ToString()), + s_cacheFileName); + } + else + { + m_cacheFilePath = Path.Combine(Path.Combine(Path.Combine(VssClientSettings.ClientCacheDirectory, serverGuid.ToString()), serviceOwner.ToString()), + s_cacheFileName); + } + + m_cacheLocallyFresh = false; + m_accessMappings = new Dictionary(VssStringComparer.AccessMappingMoniker); + m_services = new Dictionary>(VssStringComparer.ServiceType); + m_cachedMisses = new HashSet(VssStringComparer.ServiceType); + + m_connectionBaseUrl = connectionBaseUrl; + m_locationXmlOperator = new LocationXmlOperator(true); + } + + /// + /// True if there is a cache on disk available for this server + /// + public Boolean LocalCacheAvailable + { + get + { + EnsureDiskCacheLoaded(); + + return m_cacheAvailable; + } + } + + /// + /// Whether or not the cached data has expired (and should be refreshed) + /// + internal Boolean CacheDataExpired + { + get + { + // A) Cache is available (i.e. 
we're not relying solely on the memory cache because the disk cache file was unavailable) + // and B) The memory cache is correct with the disk cache (necessary to enforce expiration) + // and C) It is after the expiration time + return m_cacheAvailable && m_cacheLocallyFresh && DateTime.UtcNow >= m_cacheExpirationDate; + } + } + + public AccessMapping ClientAccessMapping + { + get + { + m_accessLock.EnterReadLock(); + + try + { + return !CacheDataExpired ? m_clientAccessMapping : null; + } + finally + { + m_accessLock.ExitReadLock(); + } + } + } + + public AccessMapping DefaultAccessMapping + { + get + { + m_accessLock.EnterReadLock(); + + try + { + return !CacheDataExpired ? m_defaultAccessMapping : null; + } + finally + { + m_accessLock.ExitReadLock(); + } + } + } + + public String WebApplicationRelativeDirectory + { + get + { + return m_webApplicationRelativeDirectory; + } + set + { + m_webApplicationRelativeDirectory = String.IsNullOrEmpty(value) ? m_webApplicationRelativeDirectory : value; + } + } + + public void ClearIfCacheNotFresh(Int32 serverLastChangeId) + { + if (serverLastChangeId != m_lastChangeId) + { + m_accessLock.EnterWriteLock(); + + try + { + if (serverLastChangeId != m_lastChangeId) + { + m_accessMappings.Clear(); + m_services.Clear(); + m_cachedMisses.Clear(); + m_lastChangeId = -1; + m_cacheExpirationDate = DateTime.MinValue; + } + } + finally + { + m_accessLock.ExitWriteLock(); + } + } + } + + /// + /// Removes services from both the in-memory cache and the disk cache. + /// + /// The service definitions to remove. + /// The lastChangeId the server returned when + /// it performed this operation. + public void RemoveServices(IEnumerable serviceDefinitions, Int32 lastChangeId) + { + EnsureDiskCacheLoaded(); + + m_accessLock.EnterWriteLock(); + + try + { + foreach (ServiceDefinition serviceDefinition in serviceDefinitions) + { + Dictionary definitions = null; + if (!m_services.TryGetValue(serviceDefinition.ServiceType, out definitions)) + { + continue; + } + + // If the entry is removed and there are no more definitions of this type, remove that + // entry from the services structure + if (definitions.Remove(serviceDefinition.Identifier) && definitions.Count == 0) + { + m_services.Remove(serviceDefinition.ServiceType); + } + } + + SetLastChangeId(lastChangeId, false); + Debug.Assert(m_lastChangeId == -1 || m_services.Count > 0); + WriteCacheToDisk(); + } + finally + { + m_accessLock.ExitWriteLock(); + } + } + + /// + /// Returns the access mapping for the provided moniker. + /// + /// The moniker of the access mapping to + /// return. + /// The access mapping for the provided moniker or null + /// if an access mapping for the moniker doesn't exist.. + public AccessMapping GetAccessMapping(String moniker) + { + ArgumentUtility.CheckStringForNullOrEmpty(moniker, "moniker"); + EnsureDiskCacheLoaded(); + m_accessLock.EnterReadLock(); + + try + { + if (CacheDataExpired) + { + return null; + } + + AccessMapping accessMapping; + m_accessMappings.TryGetValue(moniker, out accessMapping); + + return accessMapping; + } + finally + { + m_accessLock.ExitReadLock(); + } + } + + /// + /// Returns the service definition for the service with the + /// provided service type and identifier. Null will be returned + /// if there is no entry in the cache for this service. + /// + /// The service type we are looking for. + /// The identifier for the specific + /// service instance we are looking for. 
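// Illustrative aside (not part of this patch): CacheDataExpired above only reports expiry when the
// disk cache is usable and the in-memory copy is in sync with it. A minimal sketch of that gate,
// assuming the three pieces of state it reads (assumes using System); the names are hypothetical.
public sealed class CacheExpirySketch
{
    public bool CacheAvailable { get; set; }      // the disk cache file could be read or written
    public bool LocallyFresh { get; set; }        // the memory copy matches the disk copy
    public DateTime ExpirationUtc { get; set; }   // set to UtcNow + TTL when the cache is loaded

    public bool IsExpired()
    {
        // All three conditions must hold, mirroring CacheDataExpired.
        return CacheAvailable && LocallyFresh && DateTime.UtcNow >= ExpirationUtc;
    }
}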
+ /// The service definition for the service with the + /// provided service type and identifier. Null will be returned + /// if there is no entry in the cache for this service. + public Boolean TryFindService(String serviceType, Guid serviceIdentifier, out ServiceDefinition serviceDefinition) + { + EnsureDiskCacheLoaded(); + m_accessLock.EnterReadLock(); + + try + { + Dictionary services = null; + serviceDefinition = null; + + if (CacheDataExpired) + { + return false; + } + + if (m_services.TryGetValue(serviceType, out services)) + { + if (services.TryGetValue(serviceIdentifier, out serviceDefinition)) + { + return true; + } + } + + // Look in our cachedMisses to see if we can find it there. + if (m_cachedMisses.Contains(BuildCacheMissString(serviceType, serviceIdentifier))) + { + // We found an entry in cached misses so return true. + return true; + } + + return false; + } + finally + { + m_accessLock.ExitReadLock(); + } + } + + /// + /// Finds all services with the provided service type. + /// + /// The service type we are looking for. + /// All of the service definitions with the serviceType that + /// are in the cache or null if none are in the cache. + public IEnumerable FindServices(String serviceType) + { + EnsureDiskCacheLoaded(); + m_accessLock.EnterReadLock(); + + try + { + + Debug.Assert(m_lastChangeId == -1 || m_services.Count > 0); + + if (CacheDataExpired) + { + return null; + } + + // We either have all of the services or none. If we have none then return null. + if (m_services.Count == 0) + { + return null; + } + + // If service type is null, return all services as long as we know + // that we have all of the services + IEnumerable> dictionaries; + if (String.IsNullOrEmpty(serviceType)) + { + dictionaries = m_services.Values; + } + else + { + Dictionary services = null; + if (!m_services.TryGetValue(serviceType, out services)) + { + return null; + } + + dictionaries = new Dictionary[] { services }; + } + + // Make a copy of all of the service definitions to pass back. + List serviceDefinitions = new List(); + foreach (Dictionary dict in dictionaries) + { + foreach (ServiceDefinition definition in dict.Values) + { + serviceDefinitions.Add(definition.Clone()); + } + } + + return serviceDefinitions; + } + finally + { + m_accessLock.ExitReadLock(); + } + } + + /// + /// Loads the service data into the in-memory cache and writes the values to disk. + /// + /// The data to write to the cache. + /// Copies of the service definitions created by this load + public void LoadServicesData(LocationServiceData locationServiceData, Boolean allServicesIncluded) + { + m_accessLock.EnterWriteLock(); + + try + { + // If the server is telling us our client cache isn't fresh and we agree + // with it, clear the storage. The reason we check to see if we agree with it + // is that because of the way we cache based on filters, we may sometimes + // tell the server that our last change id is -1 because we don't have a given + // filter cached. In this case, the server will tell us our cache is out of + // date even though it isn't. 
+ if (!locationServiceData.ClientCacheFresh && locationServiceData.LastChangeId != m_lastChangeId) + { + m_accessMappings = new Dictionary(VssStringComparer.AccessMappingMoniker); + m_services = new Dictionary>(VssStringComparer.ServiceType); + m_cachedMisses = new HashSet(VssStringComparer.ServiceType); + m_lastChangeId = -1; + m_cacheExpirationDate = DateTime.MinValue; + } + else + { + EnsureDiskCacheLoadedHelper(); + } + + // We have to update the lastChangeId outside of the above if check because there + // are cases such as a register service where we cause the lastChangeId to be incremented + // and our cache isn't out of date. + SetLastChangeId(locationServiceData.LastChangeId, allServicesIncluded); + + // Use the client value if provided (this lets clients override the server) + // otherwise just use the server specified TTL. + Int32 clientCacheTimeToLive = (ClientCacheTimeToLive != null) ? ClientCacheTimeToLive.Value : locationServiceData.ClientCacheTimeToLive; + m_cacheExpirationDate = DateTime.UtcNow.AddSeconds(clientCacheTimeToLive); + + ICollection accessMappings = locationServiceData.AccessMappings; + if (accessMappings != null && accessMappings.Count > 0) + { + // Get all of the access mappings + foreach (AccessMapping accessMapping in accessMappings) + { + // We can't remove this compat code from the client library yet since + // we still support newer clients talking to older TFS servers + // which might not send VirtualDirectory + // Older server means earlier than TFS 2015 CU2 + if (accessMapping.VirtualDirectory == null && + !String.IsNullOrEmpty(WebApplicationRelativeDirectory)) + { + String absoluteUriTrimmed = accessMapping.AccessPoint.TrimEnd('/'); + String relativeDirectoryTrimmed = WebApplicationRelativeDirectory.TrimEnd('/'); + + if (VssStringComparer.ServerUrl.EndsWith(absoluteUriTrimmed, relativeDirectoryTrimmed)) + { + accessMapping.AccessPoint = absoluteUriTrimmed.Substring(0, absoluteUriTrimmed.Length - relativeDirectoryTrimmed.Length); + } + } + + // if we can find it, update the values so the objects that reference this + // access mapping are updated as well + AccessMapping existingAccessMapping; + if (m_accessMappings.TryGetValue(accessMapping.Moniker, out existingAccessMapping)) + { + existingAccessMapping.DisplayName = accessMapping.DisplayName; + existingAccessMapping.AccessPoint = accessMapping.AccessPoint; + existingAccessMapping.VirtualDirectory = accessMapping.VirtualDirectory; + } + else + { + // We didn't find it, so just set it + existingAccessMapping = accessMapping; + m_accessMappings[accessMapping.Moniker] = accessMapping; + } + } + + DetermineClientAndDefaultZones(locationServiceData.DefaultAccessMappingMoniker); + } + + if (locationServiceData.ServiceDefinitions != null) + { + // Get all of the services + foreach (ServiceDefinition definition in locationServiceData.ServiceDefinitions) + { + Dictionary definitions = null; + if (!m_services.TryGetValue(definition.ServiceType, out definitions)) + { + definitions = new Dictionary(); + m_services[definition.ServiceType] = definitions; + } + + definitions[definition.Identifier] = definition; + } + } + + // Even if the cache file wasn't previously available, let's give ourselves another opportunity to update the cache. 
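// Illustrative aside (not part of this patch): the expiration date computed above is derived from a
// TTL where a client-side override, when set, wins over the server-supplied value. A minimal sketch
// of that choice (assumes using System); ComputeExpiration is a hypothetical name.
public static class CacheTtlSketch
{
    public static DateTime ComputeExpiration(int serverTtlSeconds, int? clientTtlOverrideSeconds)
    {
        // Prefer the client override so tooling can refresh more or less aggressively than the server suggests.
        int ttl = clientTtlOverrideSeconds ?? serverTtlSeconds;
        return DateTime.UtcNow.AddSeconds(ttl);
    }
}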
+ m_cacheAvailable = true; + WriteCacheToDisk(); + } + finally + { + Debug.Assert(m_lastChangeId == -1 || m_services.Count > 0); + + m_accessLock.ExitWriteLock(); + } + } + + private void DetermineClientAndDefaultZones(String defaultAccessMappingMoniker) + { + Debug.Assert(m_accessLock.IsWriteLockHeld); + + m_defaultAccessMapping = null; + m_clientAccessMapping = null; + + // For comparisons below we MUST use .ToString() here instead of .AbsoluteUri. .AbsoluteUri will return the path + // portion of the query string as encoded if it contains characters that are unicode, .ToString() + // will not return them encoded. We must not have them encoded so that the comparison below works + // correctly. Also, we do not need to worry about the downfalls of using ToString() instead of AbsoluteUri + // here because any urls that are generated with the generated access point will be placed back into a + // Uri object before they are used in a web request. + String relativeDirectoryTrimmed = (WebApplicationRelativeDirectory != null) ? WebApplicationRelativeDirectory.TrimEnd('/') : String.Empty; + + foreach (AccessMapping accessMapping in m_accessMappings.Values) + { + if (VssStringComparer.ServerUrl.StartsWith(m_connectionBaseUrl.ToString(), accessMapping.AccessPoint.TrimEnd('/')) && + (accessMapping.VirtualDirectory == null || + VssStringComparer.UrlPath.Equals(accessMapping.VirtualDirectory, relativeDirectoryTrimmed))) + { + m_clientAccessMapping = accessMapping; + } + } + + m_defaultAccessMapping = m_accessMappings[defaultAccessMappingMoniker]; + + if (m_clientAccessMapping == null) + { + String accessPoint = m_connectionBaseUrl.ToString().TrimEnd('/'); + String virtualDirectory = String.Empty; + + if (!String.IsNullOrEmpty(WebApplicationRelativeDirectory)) + { + if (VssStringComparer.ServerUrl.EndsWith(accessPoint, relativeDirectoryTrimmed)) + { + accessPoint = accessPoint.Substring(0, accessPoint.Length - relativeDirectoryTrimmed.Length); + virtualDirectory = relativeDirectoryTrimmed; + } + } + + // Looks like we are in an unregistered zone, make up our own. + m_clientAccessMapping = new AccessMapping() + { + Moniker = accessPoint, + DisplayName = accessPoint, + AccessPoint = accessPoint, + VirtualDirectory = virtualDirectory + }; + } + } + + /// + /// Returns the AccessMappings that this location service cache knows about. + /// Note that each time this property is accessed, the list is copied and + /// returned. + /// + public IEnumerable AccessMappings + { + get + { + EnsureDiskCacheLoaded(); + m_accessLock.EnterReadLock(); + + try + { + // return a copy to prevent race conditions + List accessMappings = new List(); + + if (!CacheDataExpired) + { + foreach (AccessMapping accessMapping in m_accessMappings.Values) + { + accessMappings.Add(accessMapping); + } + } + + return accessMappings; + } + finally + { + m_accessLock.ExitReadLock(); + } + } + } + + /// + /// Removes the access mapping with the provided access mapping moniker + /// and all of the location mapping entries that have this access + /// zone. + /// + /// The moniker of the access mapping to remove. 
+ /// + public void RemoveAccessMapping(String moniker) + { + EnsureDiskCacheLoaded(); + + m_accessLock.EnterWriteLock(); + + try + { + // Remove it from the access mappings + m_accessMappings.Remove(moniker); + + // Remove each instance from the service definitions + foreach (Dictionary serviceGroup in m_services.Values) + { + foreach (ServiceDefinition definition in serviceGroup.Values) + { + // We know that it is illegal to delete an access mapping that is the default access mapping of + // a service definition so we don't have to update any of those values. + + // Remove the mapping that has the removed access mapping + for (int i = 0; i < definition.LocationMappings.Count; i++) + { + // If this one needs to be removed, swap it with the end and update the end counter + if (VssStringComparer.AccessMappingMoniker.Equals(moniker, definition.LocationMappings[i].AccessMappingMoniker)) + { + definition.LocationMappings.RemoveAt(i); + break; + } + } + } + } + + WriteCacheToDisk(); + } + finally + { + m_accessLock.ExitWriteLock(); + } + } + + /// + /// Adds a cached miss to the location service data, if the last change ID presented + /// matches the current value. + /// + public void AddCachedMiss(String serviceType, Guid serviceIdentifier, int missedLastChangeId) + { + if (missedLastChangeId < 0) + { + return; + } + + EnsureDiskCacheLoaded(); + m_accessLock.EnterWriteLock(); + + try + { + if (missedLastChangeId == m_lastChangeId && + m_cachedMisses.Add(BuildCacheMissString(serviceType, serviceIdentifier))) + { + WriteCacheToDisk(); + } + } + finally + { + m_accessLock.ExitWriteLock(); + } + } + + /// + /// Returns the id of the last change that this cache is aware of. + /// + public Int32 GetLastChangeId() + { + EnsureDiskCacheLoaded(); + m_accessLock.EnterReadLock(); + + try + { + return m_lastChangeId; + } + finally + { + m_accessLock.ExitReadLock(); + } + } + + /// + /// Returns the time of the cache expiration. + /// + /// + internal DateTime GetCacheExpirationDate() + { + EnsureDiskCacheLoaded(); + + m_accessLock.EnterReadLock(); + + try + { + return m_cacheExpirationDate; + } + finally + { + m_accessLock.ExitReadLock(); + } + } + + private void SetLastChangeId(Int32 lastChangeId, Boolean allServicesUpdated) + { + Debug.Assert(m_accessLock.IsWriteLockHeld); + + if (m_lastChangeId != -1 || allServicesUpdated) + { + // We only update our last change id if the last change id was valid before + // and this is an incremental update or this data includes all services. + m_lastChangeId = lastChangeId; + } + } + + private static String BuildCacheMissString(String serviceType, Guid serviceIdentifier) + { + return String.Concat(serviceType, "_", serviceIdentifier.ToString()); + } + + /// + /// + /// + internal void EnsureDiskCacheLoaded() + { + if (m_cacheLocallyFresh || !m_cacheAvailable) + { + return; + } + + m_accessLock.EnterWriteLock(); + + try + { + EnsureDiskCacheLoadedHelper(); + } + finally + { + m_accessLock.ExitWriteLock(); + } + } + + private void EnsureDiskCacheLoadedHelper() + { + Debug.Assert(m_accessLock.IsWriteLockHeld); + + FileStream unusedFile = null; + + try + { + if (m_cacheLocallyFresh || !m_cacheAvailable) + { + return; + } + + // actually load the cache from disk + // Open the file, allowing concurrent reads. + // Do not create the file if it does not exist. 
+ XmlDocument document = XmlUtility.OpenXmlFile(out unusedFile, m_cacheFilePath, FileShare.Read, saveFile: false); + + if (document != null) + { + m_accessMappings = new Dictionary(VssStringComparer.AccessMappingMoniker); + m_services = new Dictionary>(VssStringComparer.ServiceType); + m_cachedMisses = new HashSet(VssStringComparer.ServiceType); + + // There is an existing cache, load it + m_lastChangeId = m_locationXmlOperator.ReadLastChangeId(document); + m_cacheExpirationDate = m_locationXmlOperator.ReadCacheExpirationDate(document); + String defaultAccessMappingMoniker = m_locationXmlOperator.ReadDefaultAccessMappingMoniker(document); + m_webApplicationRelativeDirectory = m_locationXmlOperator.ReadVirtualDirectory(document); + + // Read and organize the access mappings + List accessMappings = m_locationXmlOperator.ReadAccessMappings(document); + foreach (AccessMapping accessMapping in accessMappings) + { + m_accessMappings[accessMapping.Moniker] = accessMapping; + } + + if (accessMappings.Count > 0) + { + DetermineClientAndDefaultZones(defaultAccessMappingMoniker); + } + else + { + m_cacheAvailable = false; + m_lastChangeId = -1; + return; + } + + // Read and organize the service definitions + List serviceDefinitions = m_locationXmlOperator.ReadServices(document, m_accessMappings); + foreach (ServiceDefinition definition in serviceDefinitions) + { + Dictionary serviceTypeSet; + if (!m_services.TryGetValue(definition.ServiceType, out serviceTypeSet)) + { + serviceTypeSet = new Dictionary(); + m_services.Add(definition.ServiceType, serviceTypeSet); + } + + serviceTypeSet[definition.Identifier] = definition; + } + + List cachedMisses = m_locationXmlOperator.ReadCachedMisses(document); + foreach (String cachedMiss in cachedMisses) + { + m_cachedMisses.Add(cachedMiss); + } + } + + // Hook up the file system watcher if we haven't already + if (m_fileSystemWatcher == null) + { + m_fileSystemWatcher = new FileSystemWatcher(Path.GetDirectoryName(m_cacheFilePath), s_cacheFileName); + m_fileSystemWatcher.NotifyFilter = NotifyFilters.LastWrite; + m_fileSystemWatcher.Changed += new FileSystemEventHandler(m_fileSystemWatcher_Changed); + } + + Debug.Assert(m_lastChangeId == -1 || m_services.Count > 0); + } + catch (Exception) + { + // It looks as though we don't have access to the cache file. Eat + // this exception and mark the cache as unavailable so we don't + // repeatedly try to access it + m_cacheAvailable = false; + m_lastChangeId = -1; + } + finally + { + m_cacheLocallyFresh = true; + + if (unusedFile != null) + { + unusedFile.Dispose(); + } + } + } + + /// + /// + /// + /// + /// + void m_fileSystemWatcher_Changed(object sender, FileSystemEventArgs e) + { + m_cacheLocallyFresh = false; + } + + /// + /// Writes the cache to disk. Callers of this function should have a writer + /// lock. 
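// Illustrative aside (not part of this patch): the FileSystemWatcher wired up above marks the
// in-memory copy stale whenever another process rewrites the cache file, so the next read reloads
// from disk. A minimal self-contained sketch of the same invalidation pattern (assumes using System
// and System.IO); the names here are hypothetical.
public sealed class WatchedCacheSketch : IDisposable
{
    private readonly FileSystemWatcher _watcher;
    private volatile bool _fresh;

    public WatchedCacheSketch(string directory, string fileName)
    {
        _watcher = new FileSystemWatcher(directory, fileName)
        {
            NotifyFilter = NotifyFilters.LastWrite,
            EnableRaisingEvents = true
        };
        // Any external write invalidates the in-memory copy; the next reader reloads it.
        _watcher.Changed += (sender, args) => _fresh = false;
    }

    public bool Fresh => _fresh;

    public void MarkLoaded() => _fresh = true;

    public void Dispose() => _watcher.Dispose();
}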
+ /// + private void WriteCacheToDisk() + { + Debug.Assert(m_accessLock.IsWriteLockHeld); + + if (!m_cacheAvailable) + { + return; + } + + try + { + Debug.Assert(m_lastChangeId == -1 || m_services.Count > 0); + // Get an exclusive lock on the file + using (FileStream file = XmlUtility.OpenFile(m_cacheFilePath, FileShare.None, true)) + { + XmlDocument document = new XmlDocument(); + + XmlNode documentNode = document.CreateNode(XmlNodeType.Element, s_docStartElement, null); + document.AppendChild(documentNode); + + m_locationXmlOperator.WriteLastChangeId(documentNode, m_lastChangeId); + m_locationXmlOperator.WriteCacheExpirationDate(documentNode, m_cacheExpirationDate); + m_locationXmlOperator.WriteDefaultAccessMappingMoniker(documentNode, m_defaultAccessMapping.Moniker); + m_locationXmlOperator.WriteVirtualDirectory(documentNode, m_webApplicationRelativeDirectory); + m_locationXmlOperator.WriteAccessMappings(documentNode, m_accessMappings.Values); + + // Build up a list of the service definitions for writing + List serviceDefinitions = new List(); + foreach (Dictionary serviceTypeSet in m_services.Values) + { + serviceDefinitions.AddRange(serviceTypeSet.Values); + } + + m_locationXmlOperator.WriteServices(documentNode, serviceDefinitions); + m_locationXmlOperator.WriteCachedMisses(documentNode, m_cachedMisses); + + // Reset the file stream. + file.SetLength(0); + file.Position = 0; + + // Save the file. + document.Save(file); + } + } + catch (Exception) + { + // It looks as though we don't have access to the cache file. Eat + // this exception and mark the cache as unavailable so we don't + // repeatedly try to access it + m_cacheAvailable = false; + } + } + + /// + /// This setting controls the amount of time before the cache expires + /// + internal Int32? ClientCacheTimeToLive + { + get; + set; + } + + /// + /// This is the set of services available from this service location + /// service. + /// + private Dictionary> m_services; + + /// + /// This is the set of services that have been queried since our last update + /// from the server that we know don't exist. + /// + private HashSet m_cachedMisses; + + /// + /// Keeps track of all access mappings that have been given to us by the server. + /// The key is their identifier. + /// + private Dictionary m_accessMappings; + + /// + /// Keeps track of the lastChangeId for the last change that was put in this cache. + /// + private Int32 m_lastChangeId; + + /// + /// The time after which the local cache data is invalid. This is used to prematurely expire the client cache + /// even if we don't know (yet) whether or not the server changed. By expiring the client cache we + /// can ensure that clients will be forced to check for server updates periodically rather than relying on the + /// client cache indefinitely in the degenerate case where no client tools ever explicitly call Connect() (such as tf.exe) + /// + private DateTime m_cacheExpirationDate; + + /// + /// This is used to protect the services in-memory store. + /// + private ReaderWriterLockSlim m_accessLock = new ReaderWriterLockSlim(LockRecursionPolicy.NoRecursion); + + private String m_webApplicationRelativeDirectory; + + /// + /// Only let one process write to a cache at a time. + /// + private static Object s_cacheMutex = new Object(); + + /// + /// This object is used to keep track of whether or not our cache is fresh + /// with respect to what we have on disk. 
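// Illustrative aside (not part of this patch): WriteCacheToDisk above rewrites the cache file in
// place while holding an exclusive handle: truncate the stream, rewind, then save the freshly built
// document. A minimal sketch of that rewrite step using only BCL types (assumes using System.IO and
// System.Xml); the class and method names are hypothetical.
public static class XmlRewriteSketch
{
    public static void Rewrite(string path, XmlDocument document)
    {
        // FileShare.None keeps other readers and writers out while the file is regenerated.
        using (FileStream file = new FileStream(path, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None))
        {
            file.SetLength(0);
            file.Position = 0;
            document.Save(file);
        }
    }
}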
+ /// + private Boolean m_cacheLocallyFresh; + + /// + /// This is true if we do not have access to the cache file + /// + private Boolean m_cacheAvailable; + + /// + /// This is used to watch for others changing our cache so we can respond to + /// those changes + /// + private FileSystemWatcher m_fileSystemWatcher; + + private Uri m_connectionBaseUrl; + + /// + /// The two calculated access mappings that this manager caches. + /// + private AccessMapping m_clientAccessMapping; + private AccessMapping m_defaultAccessMapping; + + /// + /// persistent cache file name values + /// + private static readonly String s_cacheFileName = "LocationServiceData.config"; + private String m_cacheFilePath; + + private LocationXmlOperator m_locationXmlOperator; + + /// + /// xml document related constants + /// + private const String s_docStartElement = "LocationServiceConfiguration"; + } +} diff --git a/src/Sdk/WebApi/WebApi/Location/LocationServerMapCache.cs b/src/Sdk/WebApi/WebApi/Location/LocationServerMapCache.cs new file mode 100644 index 00000000000..4ae35563445 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Location/LocationServerMapCache.cs @@ -0,0 +1,424 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Xml; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.WebApi.Location +{ + /// + /// + /// + internal class ServerMapData + { + /// + /// + /// + public ServerMapData() + : this(Guid.Empty, Guid.Empty) + { + } + + /// + /// + /// + /// + /// + public ServerMapData(Guid serverId, Guid serviceOwner) + { + ServerId = serverId; + ServiceOwner = serviceOwner; + } + + /// + /// + /// + public Guid ServerId + { + get; + set; + } + + /// + /// + /// + public Guid ServiceOwner + { + get; + set; + } + } + + internal static class LocationServerMapCache + { + /// + /// Finds the location for the specified guid. If it is not found, null + /// is returned. + /// + /// The server instance id associated with the + /// desired location service url. + /// The location of the location service for this server or null + /// if the guid is not found. + public static String ReadServerLocation(Guid serverId, Guid serviceOwner) + { + try + { + EnsureCacheLoaded(); + s_accessLock.EnterReadLock(); + + // Iterate through the dictionary to find the location we are looking for + foreach (KeyValuePair pair in s_serverMappings) + { + if (Guid.Equals(serverId, pair.Value.ServerId) && + Guid.Equals(serviceOwner, pair.Value.ServiceOwner)) + { + return pair.Key; + } + } + + return null; + } + finally + { + if (s_accessLock.IsReadLockHeld) + { + s_accessLock.ExitReadLock(); + } + } + } + + /// + /// + /// + /// + /// The guid for this location or Guid.Empty if the location + /// does not have an entry. + public static ServerMapData ReadServerData(String location) + { + try + { + EnsureCacheLoaded(); + s_accessLock.EnterReadLock(); + + ServerMapData serverData; + if (!s_serverMappings.TryGetValue(location, out serverData)) + { + return new ServerMapData(); + } + + return serverData; + } + finally + { + if (s_accessLock.IsReadLockHeld) + { + s_accessLock.ExitReadLock(); + } + } + } + + /// + /// If this call is not a change, nothing will be done. + /// + /// + /// + /// + /// True if this is the first time the mapping was written. 
+ public static Boolean EnsureServerMappingExists(String location, Guid serverId, Guid serviceOwner) + { + try + { + EnsureCacheLoaded(); + s_accessLock.EnterWriteLock(); + + // See if this is an update or an add to optimize writing the disk. + Boolean isNew = true; + ServerMapData storedData; + if (s_serverMappings.TryGetValue(location, out storedData)) + { + if (storedData.ServerId.Equals(serverId) && + storedData.ServiceOwner.Equals(serviceOwner)) + { + return false; + } + isNew = false; + } + + // Make the change in the cache + s_serverMappings[location] = new ServerMapData(serverId, serviceOwner); + + s_accessLock.ExitWriteLock(); + + // Persist the change + return TryWriteMappingToDisk(location, serverId, serviceOwner, isNew); + } + finally + { + if (s_accessLock.IsWriteLockHeld) + { + s_accessLock.ExitWriteLock(); + } + } + } + + private static void EnsureCacheLoaded() + { + if (s_cacheFreshLocally || s_cacheUnavailable) + { + return; + } + + FileStream file = null; + + try + { + s_accessLock.EnterWriteLock(); + + if (s_cacheFreshLocally) + { + return; + } + + // actually load the cache from disk + // Open the file, allowing concurrent reads. + // Do not create the file if it does not exist. + XmlDocument document = XmlUtility.OpenXmlFile(out file, FilePath, FileShare.Read, false); + + if (document != null) + { + // This is an existing document, get the root node + XmlNode documentNode = document.ChildNodes[0]; + + // Load all of the mappings + foreach (XmlNode mappingNode in documentNode.ChildNodes) + { + String location = mappingNode.Attributes[s_locationAttribute].InnerText; + Guid guid = XmlConvert.ToGuid(mappingNode.Attributes[s_guidAttribute].InnerText); + + // Legacy server case: Don't error out if the existing file doesn't have the owner attribute. + // Once the server is updated the next connect call should update this record + Guid serviceOwner = Guid.Empty; + if (mappingNode.Attributes[s_ownerAttribute] != null) + { + serviceOwner = XmlConvert.ToGuid(mappingNode.Attributes[s_ownerAttribute].InnerText); + } + + // If the service owner is absent, then the server is on-prem + if (Guid.Empty == serviceOwner) + { + serviceOwner = ServiceInstanceTypes.TFSOnPremises; + } + + s_serverMappings[location] = new ServerMapData(guid, serviceOwner); + } + } + + // Hook up the file system watcher so we know if we need to invalidate our cache + if (s_fileWatcher == null) + { + String directoryToWatch = VssClientSettings.ClientCacheDirectory; + + // Ensure the directory exists, otherwise FileSystemWatcher will throw. + if (!Directory.Exists(directoryToWatch)) + { + Directory.CreateDirectory(directoryToWatch); + } + + s_fileWatcher = new FileSystemWatcher(directoryToWatch, s_fileName); + s_fileWatcher.NotifyFilter = NotifyFilters.LastWrite; + s_fileWatcher.Changed += new FileSystemEventHandler(s_fileWatcher_Changed); + } + } + catch (Exception) + { + // It looks like something is wrong witht he cahce, lets just hide this + // exception and work without it. + s_cacheUnavailable = true; + } + finally + { + s_cacheFreshLocally = true; + + if (file != null) + { + file.Close(); + } + + if (s_accessLock.IsWriteLockHeld) + { + s_accessLock.ExitWriteLock(); + } + } + } + + static void s_fileWatcher_Changed(object sender, FileSystemEventArgs e) + { + s_cacheFreshLocally = false; + } + + /// + /// Writes the mapping to disk if the cache is available. 
+ /// + /// + /// + /// + /// True if the write succeeded + private static Boolean TryWriteMappingToDisk(String location, Guid serverGuid, Guid serviceOwner, Boolean isNew) + { + if (s_cacheUnavailable) + { + return false; + } + + FileStream file = null; + + try + { + // Open the file with an exclusive lock + XmlDocument existingDocument = XmlUtility.OpenXmlFile(out file, FilePath, FileShare.None, true); + + // Only allow one writer at a time + lock (s_cacheMutex) + { + XmlNode documentNode = null; + if (existingDocument == null) + { + // This is a new document, create the xml + existingDocument = new XmlDocument(); + + // This is the first entry, create the document node and add the child + documentNode = existingDocument.CreateNode(XmlNodeType.Element, s_documentXmlText, null); + existingDocument.AppendChild(documentNode); + + AddMappingNode(documentNode, location, serverGuid, serviceOwner); + } + else + { + // Get the root document node + documentNode = existingDocument.ChildNodes[0]; + + // If this is a new mapping, just add it to the document node + if (isNew) + { + AddMappingNode(documentNode, location, serverGuid, serviceOwner); + } + else + { + // This is some form of update. Find the node with the location and update + // the guid. + foreach (XmlNode mappingNode in documentNode.ChildNodes) + { + if (StringComparer.OrdinalIgnoreCase.Equals(mappingNode.Attributes[s_locationAttribute].InnerText, location)) + { + // This is the one we have to update, do so now + mappingNode.Attributes[s_guidAttribute].InnerText = XmlConvert.ToString(serverGuid); + + // For compatibility with older OMs with the same major version, persist the on-prem service owner as empty. + if (ServiceInstanceTypes.TFSOnPremises == serviceOwner) + { + serviceOwner = Guid.Empty; + } + + // Legacy server case: Let's be resilient to the persisted document not already having an owner attribute + XmlAttribute ownerAttribute = existingDocument.CreateAttribute(s_ownerAttribute); + ownerAttribute.InnerText = XmlConvert.ToString(serviceOwner); + mappingNode.Attributes.Append(ownerAttribute); + } + } + } + } + + // Reset the file stream. + file.SetLength(0); + file.Position = 0; + + // Save the file. + existingDocument.Save(file); + + return true; + } + } + catch (Exception) + { + // It looks like we are being denied access to the cache, lets just hide this + // exception and work without it. + s_cacheUnavailable = true; + return false; + } + finally + { + if (file != null) + { + file.Close(); + } + } + } + + private static void AddMappingNode(XmlNode parentNode, String location, Guid guid, Guid owner) + { + XmlNode mappingNode = parentNode.OwnerDocument.CreateNode(XmlNodeType.Element, s_mappingXmlText, null); + parentNode.AppendChild(mappingNode); + + // Write the mapping as attributes + XmlUtility.AddXmlAttribute(mappingNode, s_locationAttribute, location); + XmlUtility.AddXmlAttribute(mappingNode, s_guidAttribute, XmlConvert.ToString(guid)); + + // For compatibility with older OMs with the same major version, persist the on-prem service owner as empty. + if (ServiceInstanceTypes.TFSOnPremises == owner) + { + owner = Guid.Empty; + } + + // Legacy server case: If the server did not send back ServiceOwner in the connectionData + // let's just do what we used to do to not break anything. 
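// Illustrative aside (not part of this patch): for compatibility with older clients the on-prem
// service owner is persisted as Guid.Empty and mapped back to the on-prem instance type when the
// file is read. A minimal sketch of that round trip (assumes using System); onPremOwnerId stands in
// for ServiceInstanceTypes.TFSOnPremises and the method names are hypothetical.
public static class OwnerCompatSketch
{
    public static Guid NormalizeForWrite(Guid owner, Guid onPremOwnerId)
    {
        // Older clients with the same major version expect the on-prem owner to be written as empty.
        return owner == onPremOwnerId ? Guid.Empty : owner;
    }

    public static Guid NormalizeForRead(Guid persistedOwner, Guid onPremOwnerId)
    {
        // An empty or missing owner in the file means the server is on-premises.
        return persistedOwner == Guid.Empty ? onPremOwnerId : persistedOwner;
    }
}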
+ // Eventually we can remove this if-guard + if (owner != Guid.Empty) + { + XmlUtility.AddXmlAttribute(mappingNode, s_ownerAttribute, XmlConvert.ToString(owner)); + } + } + + private static String FilePath + { + get + { + if (s_filePath == null) + { + s_filePath = Path.Combine(VssClientSettings.ClientCacheDirectory, s_fileName); + } + + return s_filePath; + } + } + + private static ReaderWriterLockSlim s_accessLock = new ReaderWriterLockSlim(); + + private static Dictionary s_serverMappings = new Dictionary(StringComparer.OrdinalIgnoreCase); + + private static String s_filePath; + + private static FileSystemWatcher s_fileWatcher; + + /// + /// This is used to keep track of whether or not our in-memory cache is fresh with regards + /// to our persistent cache on disk. + /// + private static Boolean s_cacheFreshLocally = false; + + /// + /// This is true if we do not have access to the cache file + /// + private static Boolean s_cacheUnavailable = false; + + private static readonly String s_fileName = "LocationServerMap.xml"; + private static readonly String s_documentXmlText = "LocationServerMappings"; + private static readonly String s_mappingXmlText = "ServerMapping"; + private static readonly String s_locationAttribute = "location"; + private static readonly String s_guidAttribute = "guid"; + private static readonly String s_ownerAttribute = "owner"; + + private static Object s_cacheMutex = new Object(); + } +} diff --git a/src/Sdk/WebApi/WebApi/Location/LocationService.cs b/src/Sdk/WebApi/WebApi/Location/LocationService.cs new file mode 100644 index 00000000000..f5839d39146 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Location/LocationService.cs @@ -0,0 +1,281 @@ +using System; +using System.Collections.Concurrent; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common.Internal; +using GitHub.Services.Location; + +namespace GitHub.Services.WebApi.Location +{ + /// + /// + /// + internal class LocationService : ILocationService + { + /// + /// + /// + /// + public virtual void Initialize( + VssConnection connection) + { + m_connection = connection; + } + + /// + /// + /// + /// + /// + public ILocationDataProvider GetLocationData( + Guid locationAreaIdentifier) + { + return GetLocationDataAsync(locationAreaIdentifier).SyncResult(); + } + + /// + /// + /// + /// + /// + /// + public async Task GetLocationDataAsync( + Guid locationAreaIdentifier, + CancellationToken cancellationToken = default(CancellationToken)) + { + if (locationAreaIdentifier == Guid.Empty || + locationAreaIdentifier == LocationServiceConstants.SelfReferenceIdentifier) + { + return LocalDataProvider; + } + else + { + // These methods might make a server call but generally it will be accessing cached data + Guid instanceId = await LocalDataProvider.GetInstanceIdAsync(cancellationToken).ConfigureAwait(false); + Guid instanceType = await LocalDataProvider.GetInstanceTypeAsync(cancellationToken).ConfigureAwait(false); + + if (locationAreaIdentifier == instanceId || + locationAreaIdentifier == instanceType || + instanceType == ServiceInstanceTypes.TFSOnPremises) + { + // Never do location traversal for OnPrem + return LocalDataProvider; + } + else + { + return await ResolveLocationDataAsync(locationAreaIdentifier, cancellationToken).ConfigureAwait(false); + } + } + } + + /// + /// + /// + /// + /// + /// + private async Task ResolveLocationDataAsync( + Guid locationAreaIdentifier, + CancellationToken cancellationToken = default(CancellationToken)) + { + ILocationDataProvider locationData = null; 
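// Sketch, not part of this patch: the lazy initialization just below publishes the provider cache
// with Interlocked.CompareExchange, so concurrent callers converge on a single instance without
// taking a lock. The same pattern in isolation (PublishOnce and its factory delegate are hypothetical):
using System;
using System.Threading;

internal sealed class PublishOnce<TValue> where TValue : class
{
    private TValue m_value;

    public TValue GetOrCreate(Func<TValue> factory)
    {
        TValue value = m_value;
        if (value == null)
        {
            value = factory();

            // Store our instance only if the field is still null; the previous value is returned either way.
            TValue existing = Interlocked.CompareExchange(ref m_value, value, null);
            if (existing != null)
            {
                value = existing; // lost the race, so discard ours and use the winner
            }
        }

        return value;
    }
}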
+ ProviderCache providerLookup = m_providerLookup; + + if (providerLookup == null) + { + providerLookup = new ProviderCache(); + + // Create and seed the cache with the local url + String location = await LocalDataProvider.LocationForCurrentConnectionAsync( + ServiceInterfaces.LocationService2, + LocationServiceConstants.SelfReferenceIdentifier, + cancellationToken).ConfigureAwait(false); + + if (location != null) + { + providerLookup.GetOrAdd(location, LocalDataProvider); + } + + ProviderCache actualProvider = Interlocked.CompareExchange(ref m_providerLookup, providerLookup, null); + + // Did we lose the race? Pick the winner + if (actualProvider != null) + { + providerLookup = actualProvider; + } + } + + if (!providerLookup.TryGetValue(locationAreaIdentifier, out locationData)) + { + // First, check our current provider (see if a direct pointer is registered) + String location = await LocalDataProvider.LocationForCurrentConnectionAsync( + ServiceInterfaces.LocationService2, + locationAreaIdentifier, + cancellationToken).ConfigureAwait(false); + + // Next, check and see if we have a root pointer + if (location == null && + locationAreaIdentifier != LocationServiceConstants.ApplicationIdentifier && + locationAreaIdentifier != LocationServiceConstants.RootIdentifier) // Don't infinitely recurse + { + ILocationDataProvider rootProvider = await ResolveLocationDataAsync( + LocationServiceConstants.RootIdentifier, + cancellationToken).ConfigureAwait(false); + + if (rootProvider != null && + !Object.ReferenceEquals(rootProvider, LocalDataProvider)) + { + location = await rootProvider.LocationForCurrentConnectionAsync( + ServiceInterfaces.LocationService2, + locationAreaIdentifier, + cancellationToken).ConfigureAwait(false); + } + } + + if (location != null) + { + // The caller could be asking for a serviceIdentifier which resolves to a URL + // for which we already have a cached provider. + // This is typical when serviceIdentifier is a ResourceArea guid. + if (!providerLookup.TryGetValue(location, out locationData)) + { + locationData = await CreateDataProviderAsync(location, cancellationToken).ConfigureAwait(false); + locationData = providerLookup.GetOrAdd(location, locationData); + } + + providerLookup[locationAreaIdentifier] = locationData; + } + } + + return locationData; + } + + /// + /// + /// + /// + /// + public String GetLocationServiceUrl( + Guid locationAreaIdentifier) + { + return GetLocationServiceUrlAsync(locationAreaIdentifier, null).SyncResult(); + } + + /// + /// + /// + /// + /// + /// + public String GetLocationServiceUrl( + Guid locationAreaIdentifier, + String accessMappingMoniker = null) + { + return GetLocationServiceUrlAsync(locationAreaIdentifier, accessMappingMoniker).SyncResult(); + } + + /// + /// + /// + /// + /// + /// + /// + public async Task GetLocationServiceUrlAsync( + Guid locationAreaIdentifier, + String accessMappingMoniker = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + ILocationDataProvider locationData = await GetLocationDataAsync(locationAreaIdentifier, cancellationToken).ConfigureAwait(false); + + if (locationData == null) + { + return null; + } + + AccessMapping accessMapping = await locationData.GetAccessMappingAsync(accessMappingMoniker ?? 
AccessMappingConstants.PublicAccessMappingMoniker).ConfigureAwait(false); + + if (accessMapping == null) + { + accessMapping = await locationData.GetClientAccessMappingAsync().ConfigureAwait(false); + } + + return await locationData.LocationForAccessMappingAsync( + ServiceInterfaces.LocationService2, + LocationServiceConstants.SelfReferenceIdentifier, + accessMapping, + cancellationToken).ConfigureAwait(false); + } + + /// + /// + /// + /// + /// + /// + protected virtual async Task CreateDataProviderAsync( + String location, + CancellationToken cancellationToken = default(CancellationToken)) + { + VssClientHttpRequestSettings locationServiceRequestSettings = VssClientHttpRequestSettings.Default.Clone(); + locationServiceRequestSettings.SendTimeout = TimeSpan.FromSeconds(30); // If not set here, the send timeout will use the default of 100 seconds, which is too long. + VssConnection connection = new VssConnection(new Uri(location), m_connection.Credentials, locationServiceRequestSettings); + IVssServerDataProvider dataProvider = connection.ServerDataProvider; + + // If this provider is connected, then we should make sure the remote provider + // is also up-to-date + if (m_connection.ServerDataProvider.HasConnected) + { + await dataProvider.ConnectAsync(ConnectOptions.None, cancellationToken).ConfigureAwait(false); + } + + return dataProvider; + } + + /// + /// + /// + protected virtual ILocationDataProvider LocalDataProvider + { + get + { + return m_connection.ServerDataProvider; + } + } + + private VssConnection m_connection; + private ProviderCache m_providerLookup; + + private class ProviderCache + { + public Boolean TryGetValue(Guid locationAreaIdentfier, out ILocationDataProvider provider) + { + return m_guidCache.TryGetValue(locationAreaIdentfier, out provider); + } + + public Boolean TryGetValue(String locationUrl, out ILocationDataProvider provider) + { + return m_urlCache.TryGetValue(NormalizeUrl(locationUrl), out provider); + } + + public ILocationDataProvider GetOrAdd(String locationUrl, ILocationDataProvider provider) + { + return m_urlCache.GetOrAdd(NormalizeUrl(locationUrl), provider); + } + + public ILocationDataProvider this[Guid locationAreaIdentifier] + { + get { return m_guidCache[locationAreaIdentifier]; } + set { m_guidCache[locationAreaIdentifier] = value; } + } + + private static String NormalizeUrl(String locationUrl) + { + return UriUtility.AppendSlashToPathIfNeeded(locationUrl); + } + + private ConcurrentDictionary m_guidCache = new ConcurrentDictionary(); + private ConcurrentDictionary m_urlCache = new ConcurrentDictionary(StringComparer.OrdinalIgnoreCase); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Location/LocationXmlOperator.cs b/src/Sdk/WebApi/WebApi/Location/LocationXmlOperator.cs new file mode 100644 index 00000000000..33f442ffe44 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Location/LocationXmlOperator.cs @@ -0,0 +1,601 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
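// Sketch, not part of this patch: ProviderCache above keys providers both by resource-area GUID and
// by normalized URL, so two different area identifiers that resolve to the same URL end up sharing a
// single provider. A minimal stand-alone version of that idea (DualKeyedCache is hypothetical):
using System;
using System.Collections.Concurrent;

internal sealed class DualKeyedCache<TValue> where TValue : class
{
    private readonly ConcurrentDictionary<Guid, TValue> m_byGuid = new ConcurrentDictionary<Guid, TValue>();
    private readonly ConcurrentDictionary<string, TValue> m_byUrl =
        new ConcurrentDictionary<string, TValue>(StringComparer.OrdinalIgnoreCase);

    public TValue GetOrAddByUrl(string url, TValue value)
    {
        // If two callers race on the same URL, GetOrAdd hands both of them the winning instance.
        return m_byUrl.GetOrAdd(Normalize(url), value);
    }

    public bool TryGetByUrl(string url, out TValue value)
    {
        return m_byUrl.TryGetValue(Normalize(url), out value);
    }

    public void SetByGuid(Guid key, TValue value)
    {
        m_byGuid[key] = value; // alias an area identifier to an already registered provider
    }

    public bool TryGetByGuid(Guid key, out TValue value)
    {
        return m_byGuid.TryGetValue(key, out value);
    }

    private static string Normalize(string url)
    {
        // Stands in for the trailing-slash normalization the real cache delegates to UriUtility.
        return url.EndsWith("/", StringComparison.Ordinal) ? url : url + "/";
    }
}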
+using System; +using System.Collections.Generic; +using System.IO; +using System.Text; +using System.Xml; +using GitHub.Services.Common; +using GitHub.Services.Location; +using GitHub.Services.Common.Internal; + +namespace GitHub.Services.WebApi.Location +{ + internal class LocationXmlOperator + { + /// + /// This is to be used for reading in an xml file that contains service definitions that + /// have to be loaded during install + /// + /// True if the parser is parsing xml from a client cache + public LocationXmlOperator(Boolean isClientCache) + { + m_isClientCache = isClientCache; + m_accessMappingLocationServiceUrls = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + /// + /// Reads the service definitions from the provided document. + /// For a specification of what the xml should look like, see the + /// corresponding Write method. + /// + /// The document to read from. + /// A list of service definitions. + public List ReadServices(XmlDocument document, Dictionary accessMappings) + { + List definitions = new List(); + + XmlNodeList servicesNodeList = document.SelectNodes("//" + s_services); + if (servicesNodeList == null) + { + return definitions; + } + + foreach (XmlNode servicesNode in servicesNodeList) + { + // Get all of the service definition nodes + foreach (XmlNode definitionNode in servicesNode.SelectNodes("./" + s_serviceDefinition)) + { + ServiceDefinition definition = new ServiceDefinition(); + + // Get the service type - it must exist + XmlNode serviceTypeNode = definitionNode.SelectSingleNode("./" + s_serviceType); + LocationXmlOperator.CheckXmlNodeNullOrEmpty(serviceTypeNode, s_serviceType, definitionNode); + definition.ServiceType = serviceTypeNode.InnerText; + + // Get the identifier if it exists - it must exist if this is the client cache + XmlNode identifierNode = definitionNode.SelectSingleNode("./" + s_identifier); + if (m_isClientCache) + { + LocationXmlOperator.CheckXmlNodeNullOrEmpty(identifierNode, s_identifier, definitionNode); + } + definition.Identifier = (identifierNode != null) ? XmlConvert.ToGuid(identifierNode.InnerText) : Guid.Empty; + + // Get the display name - it must exist + XmlNode displayNameNode = definitionNode.SelectSingleNode("./" + s_displayName); + LocationXmlOperator.CheckXmlNodeNullOrEmpty(displayNameNode, s_displayName, definitionNode); + definition.DisplayName = displayNameNode.InnerText; + + // Get the description if it exists + XmlNode descriptionNode = definitionNode.SelectSingleNode("./" + s_description); + definition.Description = (descriptionNode != null) ? descriptionNode.InnerText : String.Empty; + + // Get the relativePath and the relativeTo setting + XmlNode relativePathNode = definitionNode.SelectSingleNode("./" + s_relativePath); + LocationXmlOperator.CheckXmlNodeNull(relativePathNode, s_relativePath, definitionNode); + definition.RelativePath = relativePathNode.InnerText; + + // Get the relativeTo setting + XmlAttribute relativeToAttribute = relativePathNode.Attributes[s_relativeTo]; + CheckXmlAttributeNullOrEmpty(relativeToAttribute, s_relativeTo, relativePathNode); + RelativeToSetting setting; + if (!RelativeToEnumCache.GetRelativeToEnums().TryGetValue(relativeToAttribute.InnerText, out setting)) + { + throw new ConfigFileException(relativeToAttribute.InnerText); + } + definition.RelativeToSetting = setting; + + // If the relativeToSetting is FullyQualified and the path is empty, set it to null + // to make the framework happy. 
+ if (definition.RelativeToSetting == RelativeToSetting.FullyQualified && definition.RelativePath == String.Empty) + { + definition.RelativePath = null; + } + + XmlNode parentServiceTypeNode = definitionNode.SelectSingleNode("./" + s_parentServiceType); + definition.ParentServiceType = (parentServiceTypeNode != null) ? parentServiceTypeNode.InnerText : null; + + XmlNode parentIdentifierNode = definitionNode.SelectSingleNode("./" + s_parentIdentifier); + definition.ParentIdentifier = (parentIdentifierNode != null) ? XmlConvert.ToGuid(parentIdentifierNode.InnerText) : Guid.Empty; + + // Get all of the location mappings + definition.LocationMappings = new List(); + if (definition.RelativeToSetting == RelativeToSetting.FullyQualified) + { + XmlNodeList mappings = definitionNode.SelectNodes(".//" + s_locationMapping); + + foreach (XmlNode mappingNode in mappings) + { + LocationMapping locationMapping = new LocationMapping(); + + // Get the accessMapping + XmlNode accessMappingNode = mappingNode.SelectSingleNode("./" + s_accessMapping); + LocationXmlOperator.CheckXmlNodeNullOrEmpty(accessMappingNode, s_accessMapping, mappingNode); + locationMapping.AccessMappingMoniker = accessMappingNode.InnerText; + + // Only process the location code if this is the client cache and there better + // not be a location node if this isn't a client cache. + XmlNode locationNode = mappingNode.SelectSingleNode("./" + s_location); + if (m_isClientCache) + { + CheckXmlNodeNullOrEmpty(locationNode, s_location, mappingNode); + } + + locationMapping.Location = (locationNode != null) ? locationNode.InnerText : null; + + // We will let the caller build the proper location from the proper service definitions + // instead of doing it here. + + definition.LocationMappings.Add(locationMapping); + } + } + + // Get the resourceVersion + XmlNode resourceVersionNode = definitionNode.SelectSingleNode("./" + s_resourceVersion); + definition.ResourceVersion = (resourceVersionNode != null) ? XmlConvert.ToInt32(resourceVersionNode.InnerText) : 0; + + // Get the minVersion + XmlNode minVersionNode = definitionNode.SelectSingleNode("./" + s_minVersion); + definition.MinVersionString = (minVersionNode != null) ? minVersionNode.InnerText : null; + + // Get the maxVersion + XmlNode maxVersionNode = definitionNode.SelectSingleNode("./" + s_maxVersion); + definition.MaxVersionString = (maxVersionNode != null) ? maxVersionNode.InnerText : null; + + // Get the releasedVersion + XmlNode releasedVersionNode = definitionNode.SelectSingleNode("./" + s_releasedVersion); + definition.ReleasedVersionString = (releasedVersionNode != null) ? releasedVersionNode.InnerText : null; + + definitions.Add(definition); + } + } + + return definitions; + } + + public List ReadCachedMisses(XmlDocument document) + { + List cachedMisses = new List(); + + XmlNodeList cachedMissesNodeList = document.SelectNodes("//" + s_cachedMisses); + if (cachedMissesNodeList == null) + { + return cachedMisses; + } + + foreach (XmlNode cachedMissesNode in cachedMissesNodeList) + { + // Get all of the service definition nodes + foreach (XmlNode cachedMissNode in cachedMissesNode.SelectNodes("./" + s_cachedMiss)) + { + cachedMisses.Add(cachedMissNode.InnerText); + } + } + + return cachedMisses; + } + + /// + /// Reads the access mappings from the provided document. + /// For a specification of what the xml should look like, see the + /// corresponding Write method. + /// + /// The document to read from. + /// A list of access mappings. 
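// Sketch, not part of this patch: the readers in this class drive the parse with relative XPath
// queries and only dereference optional nodes after a null check. The same pattern against a tiny
// hypothetical document:
using System;
using System.Xml;

internal static class XPathReadSketch
{
    public static void Run()
    {
        XmlDocument document = new XmlDocument();
        document.LoadXml("<Services><ServiceDefinition><ServiceType>Example</ServiceType></ServiceDefinition></Services>");

        foreach (XmlNode servicesNode in document.SelectNodes("//Services"))
        {
            foreach (XmlNode definitionNode in servicesNode.SelectNodes("./ServiceDefinition"))
            {
                XmlNode serviceTypeNode = definitionNode.SelectSingleNode("./ServiceType");
                XmlNode descriptionNode = definitionNode.SelectSingleNode("./Description");

                // Required element: present in the sample. Optional element: SelectSingleNode returns null.
                Console.WriteLine(serviceTypeNode.InnerText);                                    // "Example"
                Console.WriteLine(descriptionNode != null ? descriptionNode.InnerText : "<none>");
            }
        }
    }
}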
+ public List ReadAccessMappings(XmlDocument document) + { + List accessMappings = new List(); + + XmlNodeList accessMappingNodeList = document.SelectNodes("//" + s_accessMappings); + if (accessMappingNodeList == null) + { + return accessMappings; + } + + foreach (XmlNode accessMappingsNode in accessMappingNodeList) + { + foreach (XmlNode accessMappingNode in accessMappingsNode.SelectNodes("./" + s_accessMapping)) + { + AccessMapping accessMapping = new AccessMapping(); + + // Get the moniker + XmlNode monikerNode = accessMappingNode.SelectSingleNode("./" + s_moniker); + CheckXmlNodeNullOrEmpty(monikerNode, s_moniker, accessMappingNode); + accessMapping.Moniker = monikerNode.InnerText; + + // Get the enabled property + XmlNode accessPointNode = accessMappingNode.SelectSingleNode("./" + s_accessPoint); + CheckXmlNodeNullOrEmpty(accessPointNode, s_accessPoint, accessMappingNode); + accessMapping.AccessPoint = accessPointNode.InnerText; + + // Get the displayName property + XmlNode displayNameNode = accessMappingNode.SelectSingleNode("./" + s_displayName); + accessMapping.DisplayName = (displayNameNode != null) ? displayNameNode.InnerText : null; + + XmlNode virtualDirectoryNode = accessMappingNode.SelectSingleNode("./" + s_virtualDirectory); + accessMapping.VirtualDirectory = (virtualDirectoryNode != null) ? virtualDirectoryNode.InnerText : null; + + // If this isn't the client cache, load the location service url + if (!m_isClientCache) + { + XmlNode locationServiceUrlNode = accessMappingNode.SelectSingleNode("./" + s_locationServiceUrl); + String locationServiceUrl = (locationServiceUrlNode != null) ? locationServiceUrlNode.InnerText : String.Empty; + m_accessMappingLocationServiceUrls[accessMapping.Moniker] = locationServiceUrl; + } + + accessMappings.Add(accessMapping); + } + } + + return accessMappings; + } + + /// + /// Reads the last change id from the provided document. + /// For a specification of what the xml should look like, see the + /// corresponding Write method. + /// + /// The document to read from. + /// The last change id. + public Int32 ReadLastChangeId(XmlDocument document) + { + XmlNode lastChangeIdNode = document.SelectSingleNode("//" + s_lastChangeId); + return (lastChangeIdNode != null) ? XmlConvert.ToInt32(lastChangeIdNode.InnerText) : -1; + } + + public DateTime ReadCacheExpirationDate(XmlDocument document) + { + XmlNode cacheExpirationDateNode = document.SelectSingleNode("//" + s_cacheExpirationDate); + return (cacheExpirationDateNode != null) ? XmlConvert.ToDateTime(cacheExpirationDateNode.InnerText, XmlDateTimeSerializationMode.Utc) : DateTime.MinValue; + } + + public String ReadDefaultAccessMappingMoniker(XmlDocument document) + { + XmlNode defaultAccessMappingMonikerNode = document.SelectSingleNode("//" + s_defaultAccessMappingMoniker); + CheckXmlNodeNullOrEmpty(defaultAccessMappingMonikerNode, s_defaultAccessMappingMoniker, document); + return defaultAccessMappingMonikerNode.InnerText; + } + + public String ReadVirtualDirectory(XmlDocument document) + { + XmlNode virtualDirectoryNode = document.SelectSingleNode("//" + s_virtualDirectory); + CheckXmlNodeNull(virtualDirectoryNode, s_virtualDirectory, document); + return virtualDirectoryNode.InnerText; + } + + /// + /// Writes the lastChangeId to the provided document in the form + /// value + /// + /// The document to write to. + /// The value to write. 
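// Sketch, not part of this patch: the readers above and the writers that follow lean on XmlConvert
// for culture-invariant, lossless round-tripping of the primitive values stored in the cache. For
// reference, with illustrative values:
using System;
using System.Xml;

internal static class XmlConvertRoundTripSketch
{
    public static void Run()
    {
        string changeIdText = XmlConvert.ToString(42);
        int changeId = XmlConvert.ToInt32(changeIdText);                                   // 42

        string expiryText = XmlConvert.ToString(DateTime.UtcNow, XmlDateTimeSerializationMode.Utc);
        DateTime expiry = XmlConvert.ToDateTime(expiryText, XmlDateTimeSerializationMode.Utc);

        string identifierText = XmlConvert.ToString(Guid.NewGuid());
        Guid identifier = XmlConvert.ToGuid(identifierText);
    }
}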
+ public void WriteLastChangeId(XmlNode documentNode, Int32 lastChangeId) + { + XmlNode lastChangeIdNode = documentNode.OwnerDocument.CreateNode(XmlNodeType.Element, s_lastChangeId, null); + documentNode.AppendChild(lastChangeIdNode); + lastChangeIdNode.InnerText = XmlConvert.ToString(lastChangeId); + } + + public void WriteCacheExpirationDate(XmlNode documentNode, DateTime cacheExpirationDate) + { + XmlNode cacheExpirationDateNode = documentNode.OwnerDocument.CreateNode(XmlNodeType.Element, s_cacheExpirationDate, null); + documentNode.AppendChild(cacheExpirationDateNode); + cacheExpirationDateNode.InnerText = XmlConvert.ToString(cacheExpirationDate, XmlDateTimeSerializationMode.Utc); + } + + public void WriteDefaultAccessMappingMoniker(XmlNode documentNode, String defaultAccessMappingMoniker) + { + XmlNode defaultAccessMappingMonikerNode = documentNode.OwnerDocument.CreateNode(XmlNodeType.Element, s_defaultAccessMappingMoniker, null); + documentNode.AppendChild(defaultAccessMappingMonikerNode); + defaultAccessMappingMonikerNode.InnerText = defaultAccessMappingMoniker; + } + + public void WriteVirtualDirectory(XmlNode documentNode, String virtualDirectory) + { + XmlNode virtualDirectoryNode = documentNode.OwnerDocument.CreateNode(XmlNodeType.Element, s_virtualDirectory, null); + documentNode.AppendChild(virtualDirectoryNode); + virtualDirectoryNode.InnerText = virtualDirectory; + } + + /// + /// Writes the access mapping information to the provided document in the form: + /// + /// + /// value + /// value + /// value + /// value + /// + /// + /// + /// The document to write to. + /// The values to write. + public void WriteAccessMappings(XmlNode documentNode, IEnumerable accessMappings) + { + XmlDocument document = documentNode.OwnerDocument; + + XmlNode accessMappingsNode = document.CreateNode(XmlNodeType.Element, s_accessMappings, null); + documentNode.AppendChild(accessMappingsNode); + + foreach (AccessMapping accessMapping in accessMappings) + { + XmlNode accessMappingNode = document.CreateNode(XmlNodeType.Element, s_accessMapping, null); + accessMappingsNode.AppendChild(accessMappingNode); + + XmlNode monikerNode = document.CreateNode(XmlNodeType.Element, s_moniker, null); + accessMappingNode.AppendChild(monikerNode); + monikerNode.InnerText = accessMapping.Moniker; + + XmlNode accessPointNode = document.CreateNode(XmlNodeType.Element, s_accessPoint, null); + accessMappingNode.AppendChild(accessPointNode); + accessPointNode.InnerText = accessMapping.AccessPoint; + + XmlNode displayNameNode = document.CreateNode(XmlNodeType.Element, s_displayName, null); + accessMappingNode.AppendChild(displayNameNode); + displayNameNode.InnerText = accessMapping.DisplayName; + + if (accessMapping.VirtualDirectory != null) + { + XmlNode virtualDirectoryNode = document.CreateNode(XmlNodeType.Element, s_virtualDirectory, null); + accessMappingNode.AppendChild(virtualDirectoryNode); + virtualDirectoryNode.InnerText = accessMapping.VirtualDirectory; + } + } + } + + /// + /// Writes service definition information to the provided document in the form: + /// + /// + /// value + /// value + /// value + /// value + /// value + /// + /// + /// value + /// value + /// + /// . + /// . + /// . + /// + /// + /// . + /// . + /// . + /// + /// + /// The document to write to. 
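// Illustration, not part of this patch: taken together, WriteAccessMappings above and WriteServices
// below persist a cache document shaped roughly as follows. Element contents are placeholders;
// VirtualDirectory, ResourceVersion and the version strings appear only when set, and LocationMappings
// only for fully qualified definitions.
//
//   <AccessMappings>
//     <AccessMapping>
//       <Moniker>value</Moniker>
//       <AccessPoint>value</AccessPoint>
//       <DisplayName>value</DisplayName>
//       <VirtualDirectory>value</VirtualDirectory>
//     </AccessMapping>
//   </AccessMappings>
//   <Services>
//     <ServiceDefinition>
//       <ServiceType>value</ServiceType>
//       <Identifier>guid</Identifier>
//       <DisplayName>value</DisplayName>
//       <Description>value</Description>
//       <RelativePath relativeTo="setting">value</RelativePath>
//       <ParentServiceType>value</ParentServiceType>
//       <ParentIdentifier>guid</ParentIdentifier>
//       <LocationMappings>
//         <LocationMapping>
//           <AccessMapping>moniker</AccessMapping>
//           <Location>url</Location>
//         </LocationMapping>
//       </LocationMappings>
//       <ResourceVersion>int</ResourceVersion>
//       <MinVersion>version</MinVersion>
//       <MaxVersion>version</MaxVersion>
//       <ReleasedVersion>version</ReleasedVersion>
//     </ServiceDefinition>
//   </Services>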
+ /// The values to write + public void WriteServices(XmlNode documentNode, IEnumerable serviceDefinitions) + { + XmlDocument document = documentNode.OwnerDocument; + + XmlNode servicesNode = document.CreateNode(XmlNodeType.Element, s_services, null); + documentNode.AppendChild(servicesNode); + + foreach (ServiceDefinition definition in serviceDefinitions) + { + XmlNode serviceDefinitionNode = document.CreateNode(XmlNodeType.Element, s_serviceDefinition, null); + servicesNode.AppendChild(serviceDefinitionNode); + + XmlNode serviceTypeNode = document.CreateNode(XmlNodeType.Element, s_serviceType, null); + serviceDefinitionNode.AppendChild(serviceTypeNode); + serviceTypeNode.InnerText = definition.ServiceType; + + XmlNode identifierNode = document.CreateNode(XmlNodeType.Element, s_identifier, null); + serviceDefinitionNode.AppendChild(identifierNode); + identifierNode.InnerText = XmlConvert.ToString(definition.Identifier); + + if (definition.DisplayName != null) + { + XmlNode displayNameNode = document.CreateNode(XmlNodeType.Element, s_displayName, null); + serviceDefinitionNode.AppendChild(displayNameNode); + displayNameNode.InnerText = definition.DisplayName; + } + + if (definition.Description != null) + { + XmlNode descriptionNode = document.CreateNode(XmlNodeType.Element, s_description, null); + serviceDefinitionNode.AppendChild(descriptionNode); + descriptionNode.InnerText = definition.Description; + } + + XmlNode relativePathNode = document.CreateNode(XmlNodeType.Element, s_relativePath, null); + serviceDefinitionNode.AppendChild(relativePathNode); + relativePathNode.InnerText = definition.RelativePath; + + XmlUtility.AddXmlAttribute(relativePathNode, s_relativeTo, definition.RelativeToSetting.ToString()); + + XmlNode parentServiceTypeNode = document.CreateNode(XmlNodeType.Element, s_parentServiceType, null); + serviceDefinitionNode.AppendChild(parentServiceTypeNode); + parentServiceTypeNode.InnerText = definition.ParentServiceType; + + XmlNode parentIdentifierNode = document.CreateNode(XmlNodeType.Element, s_parentIdentifier, null); + serviceDefinitionNode.AppendChild(parentIdentifierNode); + parentIdentifierNode.InnerText = XmlConvert.ToString(definition.ParentIdentifier); + + if (definition.RelativeToSetting == RelativeToSetting.FullyQualified) + { + XmlNode locationMappingsNode = document.CreateNode(XmlNodeType.Element, s_locationMappings, null); + serviceDefinitionNode.AppendChild(locationMappingsNode); + + foreach (LocationMapping mapping in definition.LocationMappings) + { + XmlNode locationMappingNode = document.CreateNode(XmlNodeType.Element, s_locationMapping, null); + locationMappingsNode.AppendChild(locationMappingNode); + + XmlNode accessMappingNode = document.CreateNode(XmlNodeType.Element, s_accessMapping, null); + locationMappingNode.AppendChild(accessMappingNode); + accessMappingNode.InnerText = mapping.AccessMappingMoniker; + + XmlNode locationNode = document.CreateNode(XmlNodeType.Element, s_location, null); + locationMappingNode.AppendChild(locationNode); + locationNode.InnerText = mapping.Location; + } + } + + if (definition.ResourceVersion > 0) + { + XmlNode resourceVersionNode = document.CreateNode(XmlNodeType.Element, s_resourceVersion, null); + serviceDefinitionNode.AppendChild(resourceVersionNode); + resourceVersionNode.InnerText = XmlConvert.ToString(definition.ResourceVersion); + } + + if (definition.MinVersionString != null) + { + XmlNode minVersionNode = document.CreateNode(XmlNodeType.Element, s_minVersion, null); + 
serviceDefinitionNode.AppendChild(minVersionNode); + minVersionNode.InnerText = definition.MinVersionString; + } + + if (definition.MaxVersionString != null) + { + XmlNode maxVersionNode = document.CreateNode(XmlNodeType.Element, s_maxVersion, null); + serviceDefinitionNode.AppendChild(maxVersionNode); + maxVersionNode.InnerText = definition.MaxVersionString; + } + + if (definition.ReleasedVersionString != null) + { + XmlNode releasedVersionNode = document.CreateNode(XmlNodeType.Element, s_releasedVersion, null); + serviceDefinitionNode.AppendChild(releasedVersionNode); + releasedVersionNode.InnerText = definition.ReleasedVersionString; + } + } + } + + public void WriteCachedMisses(XmlNode documentNode, IEnumerable cachedMisses) + { + XmlDocument document = documentNode.OwnerDocument; + + XmlNode cacheMissesNode = document.CreateNode(XmlNodeType.Element, s_cachedMisses, null); + documentNode.AppendChild(cacheMissesNode); + + foreach (String cacheMiss in cachedMisses) + { + XmlNode cacheMissNode = document.CreateNode(XmlNodeType.Element, s_cachedMiss, null); + cacheMissNode.InnerText = cacheMiss; + cacheMissesNode.AppendChild(cacheMissNode); + } + } + + /// + /// Gets the location service url for the access mapping moniker provided. + /// This function should be used to retrieve location service urls for access + /// zones that were loaded by this LocationXmlController instance. + /// + /// The access mapping moniker. + /// The location service url for this access mapping moniker. + public String GetLocationServiceUrl(String moniker) + { + return m_accessMappingLocationServiceUrls[moniker]; + } + + /// + /// Throws and exception if the node provided is null. + /// + /// The node to check. + /// The name of the node to check. + /// The parent node of the node we are checking. + private static void CheckXmlNodeNull(XmlNode node, String nodeName, XmlNode parent) + { + if (node == null) + { + throw new ConfigFileException(CommonResources.XmlNodeMissing(nodeName, parent)); + } + } + + /// + /// Throws an exception if the xml node is null or empty. + /// + /// The node we are checking. + /// The name of the node we are checking. + /// The parent node of the node we are checking. + private static void CheckXmlNodeNullOrEmpty(XmlNode node, String nodeName, XmlNode parent) + { + CheckXmlNodeNull(node, nodeName, parent); + + if (node.InnerText.Equals(String.Empty)) + { + throw new ConfigFileException(CommonResources.XmlNodeEmpty(nodeName, parent.Name)); + } + } + + /// + /// Throws exception if the attribute provided is null or empty + /// + /// The attribute we are checking. + /// The name of the attribute we are checking. + /// The node that contains this attribute. 
+ private static void CheckXmlAttributeNullOrEmpty(XmlAttribute attribute, String attributeName, XmlNode element) + { + if (attribute == null) + { + throw new ConfigFileException(CommonResources.XmlAttributeNull(attributeName, element.Name)); + } + + if (attribute.InnerText.Equals(String.Empty)) + { + throw new ConfigFileException(CommonResources.XmlAttributeEmpty(attributeName, element.Name)); + } + } + + /// + /// Maps access mapping monikers to location service urls + /// + private Dictionary m_accessMappingLocationServiceUrls; + + private Boolean m_isClientCache; + + private static readonly String s_lastChangeId = "LastChangeId"; + private static readonly String s_cacheExpirationDate = "CacheExpirationDate"; + private static readonly String s_defaultAccessMappingMoniker = "DefaultAccessMappingMoniker"; + private static readonly String s_virtualDirectory = "VirtualDirectory"; + + private static readonly String s_services = "Services"; + private static readonly String s_cachedMisses = "CachedMisses"; + private static readonly String s_serviceDefinition = "ServiceDefinition"; + private static readonly String s_cachedMiss = "CachedMiss"; + private static readonly String s_serviceType = "ServiceType"; + private static readonly String s_identifier = "Identifier"; + private static readonly String s_displayName = "DisplayName"; + private static readonly String s_locationServiceUrl = "LocationServiceUrl"; + private static readonly String s_description = "Description"; + private static readonly String s_relativePath = "RelativePath"; + private static readonly String s_relativeTo = "relativeTo"; + private static readonly String s_parentServiceType = "ParentServiceType"; + private static readonly String s_parentIdentifier = "ParentIdentifier"; + private static readonly String s_locationMappings = "LocationMappings"; + private static readonly String s_locationMapping = "LocationMapping"; + private static readonly String s_location = "Location"; + private static readonly String s_resourceVersion = "ResourceVersion"; + private static readonly String s_minVersion = "MinVersion"; + private static readonly String s_maxVersion = "MaxVersion"; + private static readonly String s_releasedVersion = "ReleasedVersion"; + + private static readonly String s_accessMappings = "AccessMappings"; + private static readonly String s_accessMapping = "AccessMapping"; + private static readonly String s_moniker = "Moniker"; + private static readonly String s_accessPoint = "AccessPoint"; + } + + internal static class RelativeToEnumCache + { + private static Dictionary s_relativeToEnums; + + static RelativeToEnumCache() + { + s_relativeToEnums = new Dictionary(StringComparer.OrdinalIgnoreCase); + s_relativeToEnums["Context"] = RelativeToSetting.Context; + s_relativeToEnums["FullyQualified"] = RelativeToSetting.FullyQualified; + s_relativeToEnums["WebApplication"] = RelativeToSetting.WebApplication; + } + + internal static Dictionary GetRelativeToEnums() + { + return s_relativeToEnums; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Location/ServerDataProvider.cs b/src/Sdk/WebApi/WebApi/Location/ServerDataProvider.cs new file mode 100644 index 00000000000..7cbcd80c975 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Location/ServerDataProvider.cs @@ -0,0 +1,816 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.Identity; +using GitHub.Services.Location; 
+using GitHub.Services.Location.Client; +using GitHub.Services.WebApi.Utilities; + +namespace GitHub.Services.WebApi.Location +{ + /// + /// + /// + public interface IVssServerDataProvider : ILocationDataProvider + { + /// + /// + /// + Boolean HasConnected { get; } + + /// + /// + /// + /// + /// + Task GetAuthorizedIdentityAsync(CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// + /// + /// + /// + Task GetAuthenticatedIdentityAsync(CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// Performs all of the steps that are necessary for setting up a connection + /// with a TeamFoundationServer. Specify what information should be + /// returned in the connectOptions parameter. + /// + /// Each time this call is made the username for the current user + /// will be returned as well as the client zone that this client is making + /// requests from. + /// + /// Specifies what information that should be + /// returned from the server. + Task ConnectAsync(ConnectOptions connectOptions, CancellationToken cancellationToken = default(CancellationToken)); + + /// + /// Reset the connection state back to disconnect + /// The client must reconnect + /// + Task DisconnectAsync(CancellationToken cancellationToken = default(CancellationToken)); + } + + /// + /// This class provides data about the server via the LocationService. + /// + internal class VssServerDataProvider : IVssServerDataProvider + { + public VssServerDataProvider( + VssConnection connection, + HttpMessageHandler pipeline, + String fullyQualifiedUrl) + { + m_connection = connection; + m_baseUri = connection.Uri; + m_fullyQualifiedUrl = fullyQualifiedUrl; + m_locationClient = new LocationHttpClient(m_baseUri, pipeline, false); + + // Try to get the guid for this server + ServerMapData serverData = LocationServerMapCache.ReadServerData(m_fullyQualifiedUrl); + m_locationDataCacheManager = new LocationCacheManager(serverData.ServerId, serverData.ServiceOwner, m_baseUri); + } + + // Back-pointer to connection + internal VssConnection Connection + { + get { return m_connection; } + } + + /// + /// Returns true if this object has successfully authenticated. + /// + public bool HasConnected + { + get + { + return m_connectionMade == true; + } + } + + /// + /// Gets the authorized user. This function will authenticate with the server if it has + /// not done so already. Like any other regular method, it throws VssUnauthorizedException + /// if the server is contacted and authentication fails. + /// + /// The authenticated user. + public async Task GetAuthorizedIdentityAsync( + CancellationToken cancellationToken = default(CancellationToken)) + { + await EnsureConnectedAsync(ConnectOptions.None).ConfigureAwait(false); + + Debug.Assert(m_authorizedIdentity != null); + return m_authorizedIdentity; + } + + /// + /// Gets the authenticated user. This function will authenticate with the server if it has + /// not done so already. Like any other regular method, it throws VssUnauthorizedException + /// if the server is contacted and authentication fails. + /// + /// The authenticated user. 
+ public async Task GetAuthenticatedIdentityAsync( + CancellationToken cancellationToken = default(CancellationToken)) + { + await EnsureConnectedAsync(ConnectOptions.None).ConfigureAwait(false); + + Debug.Assert(m_authenticatedIdentity != null); + return m_authenticatedIdentity; + } + + /// + /// + /// + public Guid InstanceId + { + get + { + return GetInstanceIdAsync().SyncResult(); + } + } + + /// + /// + /// + public Guid InstanceType + { + get + { + return GetInstanceTypeAsync().SyncResult(); + } + } + + /// + /// The unique identifier for this server. This method will attempt to return + /// a cached value, if possible. + /// + public async Task GetInstanceIdAsync( + CancellationToken cancellationToken = default(CancellationToken)) + { + if (!NeedToConnect(ConnectOptions.None)) + { + // We've already made a Connect call and have the authoritative instance ID. + return m_instanceId; + } + else + { + // Check the location server cache to see if we have the instance ID there. + ServerMapData serverData = LocationServerMapCache.ReadServerData(m_fullyQualifiedUrl); + Guid toReturn = serverData.ServerId; + + if (Guid.Empty != toReturn) + { + // We do. Return it. + return toReturn; + } + + // We do not. Make a Connect call and retrieve the instance ID. + await EnsureConnectedAsync(ConnectOptions.None, cancellationToken).ConfigureAwait(false); + return m_instanceId; + } + } + + /// + /// The unique identifier for the service owner. This property will attempt to return + /// a cached value, if possible. + /// + public async Task GetInstanceTypeAsync( + CancellationToken cancellationToken = default(CancellationToken)) + { + if (!NeedToConnect(ConnectOptions.None)) + { + // We've already made a Connect call and have the authoritative service owner ID. + return m_serviceOwner; + } + else + { + ServerMapData serverData = LocationServerMapCache.ReadServerData(m_fullyQualifiedUrl); + Guid toReturn = serverData.ServiceOwner; + + if (Guid.Empty != toReturn) + { + // We do. Return it. + return toReturn; + } + + // We do not. Make a Connect call and retrieve the service owner ID. + await EnsureConnectedAsync(ConnectOptions.None, cancellationToken).ConfigureAwait(false); + return m_serviceOwner; + } + } + + public AccessMapping DefaultAccessMapping + { + get + { + return GetDefaultAccessMappingAsync().SyncResult(); + } + } + + /// + /// + /// + /// + /// + public async Task GetDefaultAccessMappingAsync( + CancellationToken cancellationToken = default(CancellationToken)) + { + AccessMapping defaultAccessMapping = m_locationDataCacheManager.DefaultAccessMapping; + + // If defaultAccessMapping is null we may not have the cache information yet, go to the server to get the information. + if (defaultAccessMapping == null) + { + await EnsureConnectedAsync(ConnectOptions.IncludeServices, cancellationToken).ConfigureAwait(false); + defaultAccessMapping = m_locationDataCacheManager.DefaultAccessMapping; + + Debug.Assert(defaultAccessMapping != null, "defaultAccessMapping should never be null"); + } + + return defaultAccessMapping; + } + + public AccessMapping ClientAccessMapping + { + get + { + return GetClientAccessMappingAsync().SyncResult(); + } + } + + /// + /// + /// + /// + /// + public async Task GetClientAccessMappingAsync( + CancellationToken cancellationToken = default(CancellationToken)) + { + AccessMapping clientAccessMapping = m_locationDataCacheManager.ClientAccessMapping; + + // If definition is null we may not have the cache information yet, go to the server to get the information. 
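// Sketch, not part of this patch: the synchronous members of this provider (InstanceId,
// DefaultAccessMapping, GetAccessMapping, and friends) wrap their async counterparts with a
// SyncResult() extension that is defined elsewhere in the SDK and not shown here. A typical shape
// for such a helper, offered only as an assumption about what it does:
using System.Threading.Tasks;

internal static class SyncResultSketch
{
    // Blocks until the task completes; unlike reading Task.Result, GetAwaiter().GetResult()
    // rethrows the original exception instead of wrapping it in an AggregateException.
    public static T SyncResult<T>(this Task<T> task)
    {
        return task.GetAwaiter().GetResult();
    }
}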
+ if (clientAccessMapping == null) + { + await EnsureConnectedAsync(ConnectOptions.IncludeServices, cancellationToken).ConfigureAwait(false); + clientAccessMapping = m_locationDataCacheManager.ClientAccessMapping; + + Debug.Assert(clientAccessMapping != null, "clientAccessMapping should never be null"); + } + + return clientAccessMapping; + } + + public IEnumerable ConfiguredAccessMappings + { + get + { + return GetConfiguredAccessMappingsAsync().SyncResult(); + } + } + + /// + /// + /// + /// + /// + public async Task> GetConfiguredAccessMappingsAsync( + CancellationToken cancellationToken = default(CancellationToken)) + { + await EnsureConnectedAsync(ConnectOptions.IncludeServices, cancellationToken).ConfigureAwait(false); + return m_locationDataCacheManager.AccessMappings; + } + + public AccessMapping GetAccessMapping(String moniker) + { + return GetAccessMappingAsync(moniker).SyncResult(); + } + + /// + /// + /// + /// + /// + /// + public async Task GetAccessMappingAsync( + String moniker, + CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckForNull(moniker, "moniker"); + + await EnsureConnectedAsync(ConnectOptions.IncludeServices, cancellationToken).ConfigureAwait(false); + return m_locationDataCacheManager.GetAccessMapping(moniker); + } + + public String LocationForAccessMapping(String serviceType, Guid serviceIdentifier, AccessMapping accessMapping) + { + return LocationForAccessMappingAsync(serviceType, serviceIdentifier, accessMapping).SyncResult(); + } + + public async Task LocationForAccessMappingAsync( + String serviceType, + Guid serviceIdentifier, + AccessMapping accessMapping, + CancellationToken cancellationToken = default(CancellationToken)) + { + ServiceDefinition serviceDefinition = await FindServiceDefinitionAsync(serviceType, serviceIdentifier, cancellationToken).ConfigureAwait(false); + + if (serviceDefinition == null) + { + // This method is expected to return a location or fail so throw if we couldn't find + // the service definition. + throw new ServiceDefinitionDoesNotExistException(WebApiResources.ServiceDefinitionDoesNotExist(serviceType, serviceIdentifier)); + } + + return await LocationForAccessMappingAsync(serviceDefinition, accessMapping, cancellationToken).ConfigureAwait(false); + } + + public String LocationForAccessMapping( + ServiceDefinition serviceDefinition, + AccessMapping accessMapping) + { + return LocationForAccessMappingAsync(serviceDefinition, accessMapping).SyncResult(); + } + + /// + /// + /// + /// + /// + /// + /// + public Task LocationForAccessMappingAsync( + ServiceDefinition serviceDefinition, + AccessMapping accessMapping, + CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckForNull(serviceDefinition, "serviceDefinition"); + ArgumentUtility.CheckForNull(accessMapping, "accessMapping"); + + // If this is FullyQualified then look through our location mappings + if (serviceDefinition.RelativeToSetting == RelativeToSetting.FullyQualified) + { + LocationMapping locationMapping = serviceDefinition.GetLocationMapping(accessMapping); + + if (locationMapping != null) + { + return Task.FromResult(locationMapping.Location); + } + + // We weren't able to find the location for the access mapping. Return null. + return Task.FromResult(null); + } + else + { + // Make sure the AccessMapping has a valid AccessPoint. 
+ if (String.IsNullOrEmpty(accessMapping.AccessPoint)) + { + throw new InvalidAccessPointException(WebApiResources.InvalidAccessMappingLocationServiceUrl()); + } + + String webApplicationRelativeDirectory = m_locationDataCacheManager.WebApplicationRelativeDirectory; + + if (accessMapping.VirtualDirectory != null) + { + webApplicationRelativeDirectory = accessMapping.VirtualDirectory; + } + + Uri uri = new Uri(accessMapping.AccessPoint); + + String properRoot = String.Empty; + switch (serviceDefinition.RelativeToSetting) + { + case RelativeToSetting.Context: + properRoot = PathUtility.Combine(uri.AbsoluteUri, webApplicationRelativeDirectory); + break; + case RelativeToSetting.WebApplication: + properRoot = accessMapping.AccessPoint; + break; + default: + Debug.Assert(true, "Found an unknown RelativeToSetting"); + break; + } + + return Task.FromResult(PathUtility.Combine(properRoot, serviceDefinition.RelativePath)); + } + } + + public String LocationForCurrentConnection( + String serviceType, + Guid serviceIdentifier) + { + return LocationForCurrentConnectionAsync(serviceType, serviceIdentifier).SyncResult(); + } + + /// + /// + /// + /// + /// + /// + /// + public async Task LocationForCurrentConnectionAsync( + String serviceType, + Guid serviceIdentifier, + CancellationToken cancellationToken = default(CancellationToken)) + { + if (StringComparer.CurrentCultureIgnoreCase.Equals(serviceType, ServiceInterfaces.LocationService2) && + serviceIdentifier == LocationServiceConstants.SelfReferenceIdentifier) + { + // This is an edge case because the server may not have registered a self-reference pointer + // or the server is legacy and doesn't send back service owner yet. + return m_baseUri.AbsoluteUri; + } + + ServiceDefinition serviceDefinition = await FindServiceDefinitionAsync(serviceType, serviceIdentifier, cancellationToken).ConfigureAwait(false); + + if (serviceDefinition == null) + { + // This method should not throw if a ServiceDefinition could not be found. 
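// Sketch, not part of this patch: for definitions that are not fully qualified, the RelativeToSetting
// switch above builds the root from the access point, adding the web application virtual directory for
// Context-relative definitions, and the final location is that root combined with the definition's
// RelativePath. With hypothetical values (CombineUrl is a simplified stand-in for PathUtility.Combine):
using System;

internal static class RelativeRootSketch
{
    public static void Run()
    {
        string accessPoint = "https://server.example/";
        string virtualDirectory = "tfs";
        string relativePath = "_apis/connectionData";

        string contextRoot = CombineUrl(accessPoint, virtualDirectory);  // https://server.example/tfs
        string webAppRoot = accessPoint;                                 // https://server.example/

        Console.WriteLine(CombineUrl(contextRoot, relativePath));        // https://server.example/tfs/_apis/connectionData
        Console.WriteLine(CombineUrl(webAppRoot, relativePath));         // https://server.example/_apis/connectionData
    }

    private static string CombineUrl(string left, string right)
    {
        if (string.IsNullOrEmpty(right))
        {
            return left;
        }

        return left.TrimEnd('/') + "/" + right.TrimStart('/');
    }
}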
+ return null; + } + + return await LocationForCurrentConnectionAsync(serviceDefinition, cancellationToken).ConfigureAwait(false); + } + + public String LocationForCurrentConnection(ServiceDefinition serviceDefinition) + { + return LocationForCurrentConnectionAsync(serviceDefinition).SyncResult(); + } + + public async Task LocationForCurrentConnectionAsync( + ServiceDefinition serviceDefinition, + CancellationToken cancellationToken = default(CancellationToken)) + { + AccessMapping clientAccessMapping = await GetClientAccessMappingAsync(cancellationToken).ConfigureAwait(false); + String location = await LocationForAccessMappingAsync(serviceDefinition, clientAccessMapping, cancellationToken).ConfigureAwait(false); + + if (location == null) + { + AccessMapping defaultAccessMapping = await GetDefaultAccessMappingAsync(cancellationToken).ConfigureAwait(false); + location = await LocationForAccessMappingAsync(serviceDefinition, defaultAccessMapping, cancellationToken).ConfigureAwait(false); + + if (location == null) + { + LocationMapping firstLocationMapping = serviceDefinition.LocationMappings.FirstOrDefault(); + + if (firstLocationMapping == null) + { + throw new InvalidServiceDefinitionException(WebApiResources.ServiceDefinitionWithNoLocations(serviceDefinition.ServiceType)); + } + + location = firstLocationMapping.Location; + } + } + + return location; + } + + public IEnumerable FindServiceDefinitions(String serviceType) + { + return FindServiceDefinitionsAsync(serviceType).SyncResult(); + } + + /// + /// + /// + /// + /// + /// + public async Task> FindServiceDefinitionsAsync( + String serviceType, + CancellationToken cancellationToken = default(CancellationToken)) + { + // Look in the cache + IEnumerable definitions = null; + + if (m_locationDataCacheManager != null) + { + definitions = m_locationDataCacheManager.FindServices(serviceType); + } + + // If definitions is null, we had a potential cache miss, go to the server to see if our cache is up-to-date + if (definitions == null) + { + await CheckForServerUpdatesAsync(cancellationToken).ConfigureAwait(false); + + // Try again to see if we can find it now in case that something has updated. + return m_locationDataCacheManager.FindServices(serviceType); + } + + return definitions; + } + + public ServiceDefinition FindServiceDefinition(String serviceType, Guid serviceIdentifier) + { + return FindServiceDefinitionAsync(serviceType, serviceIdentifier).SyncResult(); + } + + /// + /// + /// + /// + /// + /// + /// + public async Task FindServiceDefinitionAsync( + String serviceType, + Guid serviceIdentifier, + CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckForNull(serviceType, "serviceType"); + + int lastChangeId = m_locationDataCacheManager.GetLastChangeId(); + + ServiceDefinition definition; + if (m_locationDataCacheManager.TryFindService(serviceType, serviceIdentifier, out definition)) + { + // If we hit a cache entry return it whether it is null or not. + return definition; + } + + // If we got here that means that we have a first-time cache miss, go to the server to see if our cache is up-to-date + await CheckForServerUpdatesAsync(cancellationToken).ConfigureAwait(false); + + // Try again to see if we can find it now in case that something has updated. 
+ if (!m_locationDataCacheManager.TryFindService(serviceType, serviceIdentifier, out definition)) + { + // If it is a LS2 then try to fault the definition in, otherwise add a cache miss + if (String.Equals(serviceType, ServiceInterfaces.LocationService2, StringComparison.OrdinalIgnoreCase) && + serviceIdentifier != LocationServiceConstants.RootIdentifier && + serviceIdentifier != LocationServiceConstants.ApplicationIdentifier && + await GetInstanceTypeAsync(cancellationToken).ConfigureAwait(false) == LocationServiceConstants.RootIdentifier) + { + // Force SPS to fault in the definition + definition = await m_locationClient.GetServiceDefinitionAsync(serviceType, serviceIdentifier, cancellationToken).ConfigureAwait(false); + } + else + { + m_locationDataCacheManager.AddCachedMiss(serviceType, serviceIdentifier, lastChangeId); + return null; + } + } + + return definition; + } + + public ApiResourceLocationCollection GetResourceLocations() + { + return GetResourceLocationsAsync().SyncResult(); + } + + /// + /// + /// + /// + /// + public async Task GetResourceLocationsAsync(CancellationToken cancellationToken = default(CancellationToken)) + { + if (m_resourceLocations == null) + { + IEnumerable definitions = await FindServiceDefinitionsAsync(null).ConfigureAwait(false); + + if (definitions != null) + { + IEnumerable resourceLocationDefinitions = definitions.Where(x => x.ResourceVersion > 0); + + if (resourceLocationDefinitions.Any()) + { + ApiResourceLocationCollection resourceLocations = new ApiResourceLocationCollection(); + + foreach (ServiceDefinition definition in resourceLocationDefinitions) + { + resourceLocations.AddResourceLocation(ApiResourceLocation.FromServiceDefinition(definition)); + } + + m_resourceLocations = resourceLocations; + } + } + } + + return m_resourceLocations; + } + + /// + /// Consults the server to see if any services from the filter array have + /// changed. It updates the cache with the new values. + /// + /// + private async Task CheckForServerUpdatesAsync(CancellationToken cancellationToken = default(CancellationToken)) + { + Boolean checkedForUpdates = await EnsureConnectedAsync(ConnectOptions.IncludeServices, cancellationToken).ConfigureAwait(false); + + if (!checkedForUpdates) + { + Int32 lastChangeId = m_locationDataCacheManager.GetLastChangeId(); + + // If the ServerDataProvider believes it is already connected (i.e. EnsureConnectedAsync returns false) but the location cache is in a bad state + // we need to make another Connect call to get back to a good state. + // This can happen if the disk cache is invalidated by another process (or another VssConnection object in the same process) writing to the shared file + // this will invalidate the memory cache (via FileSystemWatcher), but we could then subsequently fail to reload the disk cache (IOException) for some reason. + if (lastChangeId == -1) + { + await ConnectAsync(ConnectOptions.IncludeServices, cancellationToken).ConfigureAwait(false); + } + } + } + + /// + /// This function ensures that the connection data that is needed by the caller + /// has been retrieved from the server. This function does not use the + /// credentials provider if authentication fails. + /// + /// The options that designate the information the + /// caller needs from the server. 
+ private async Task EnsureConnectedAsync( + ConnectOptions optionsNeeded, + CancellationToken cancellationToken = default(CancellationToken)) + { + if (NeedToConnect(optionsNeeded)) + { + // We only want one thread to make the server call, so we will lock this section. + // It's **really** important that the locked contents (i.e. ConnectAsync) has no recursive path back into this code + // otherwise we will deadlock. + using (await m_connectionLock.LockAsync(cancellationToken).ConfigureAwait(false)) + { + if (NeedToConnect(optionsNeeded)) + { + await ConnectAsync(optionsNeeded, cancellationToken).ConfigureAwait(false); + return true; + } + } + } + + return false; + } + + /// + /// Returns true if we need to connect to the server. + /// + /// + /// + private Boolean NeedToConnect(ConnectOptions optionsNeeded) + { + // Make sure we refresh the information if the impersonated user has changed. + if (m_locationDataCacheManager.CacheDataExpired) + { + m_connectionMade = false; + m_validConnectionData = ConnectOptions.None; + } + + return !m_connectionMade || ((optionsNeeded & m_validConnectionData) != optionsNeeded); + } + + public async Task ConnectAsync(ConnectOptions connectOptions, CancellationToken cancellationToken = default(CancellationToken)) + { + // We want to force ourselves to includes services if our location service cache has no access mappings. + // This means that this is our first time connecting. + if (!m_locationDataCacheManager.AccessMappings.Any()) + { + connectOptions |= ConnectOptions.IncludeServices; + } + + Int32 lastChangeId = m_locationDataCacheManager.GetLastChangeId(); + + // If we have -1 then that means we have no disk cache yet or it means that we recently hit an exception trying to reload + // the the cache from disk (see Exception catch block in EnsureDiskCacheLoaded). + // Either way, we cannot make a call to the server with -1 and pass None. + // If we do, the resulting payload (which would have ClientCacheFresh=false but include no ServiceDefinitions) + // would leave the in-memory cache in an inconsistent state + if (lastChangeId == -1) + { + connectOptions |= ConnectOptions.IncludeServices; + } + + Boolean includeServices = (connectOptions & ConnectOptions.IncludeServices) == ConnectOptions.IncludeServices; + + // Perform the connection + ConnectionData connectionData = await GetConnectionDataAsync(connectOptions, lastChangeId, cancellationToken).ConfigureAwait(false); + LocationServiceData locationServiceData = connectionData.LocationServiceData; + + // If we were previously connected, make sure we cannot connect as a different user. + if (m_authenticatedIdentity != null) + { + if (!IdentityDescriptorComparer.Instance.Equals(m_authenticatedIdentity.Descriptor, connectionData.AuthenticatedUser.Descriptor)) + { + throw new VssAuthenticationException(WebApiResources.CannotAuthenticateAsAnotherUser(m_authenticatedIdentity.DisplayName, connectionData.AuthenticatedUser.DisplayName)); + } + } + + m_authenticatedIdentity = connectionData.AuthenticatedUser; + m_authorizedIdentity = connectionData.AuthorizedUser; + + m_instanceId = connectionData.InstanceId; + + if (locationServiceData != null) + { + Guid serviceOwner = connectionData.LocationServiceData.ServiceOwner; + + if (Guid.Empty == serviceOwner) + { + serviceOwner = ServiceInstanceTypes.TFSOnPremises; + } + + m_serviceOwner = serviceOwner; + } + + // Verify with our locationServerMap cache that we are storing the correct guid + // for this server. If we are, this is essentially a no-op. 
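// Sketch, not part of this patch: EnsureConnectedAsync above uses the check / acquire-lock / re-check
// shape so that only one caller performs the Connect round-trip while the rest wait and then observe
// the result. AsyncLock itself is defined elsewhere in the SDK; the same shape with a SemaphoreSlim
// stand-in:
using System.Threading;
using System.Threading.Tasks;

internal sealed class ConnectOnceSketch
{
    private readonly SemaphoreSlim m_gate = new SemaphoreSlim(1, 1);
    private volatile bool m_connected;

    public async Task EnsureConnectedAsync(CancellationToken cancellationToken = default(CancellationToken))
    {
        if (m_connected)
        {
            return;
        }

        await m_gate.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            // Re-check after acquiring the lock: another caller may have connected while we waited.
            if (!m_connected)
            {
                await ConnectAsync(cancellationToken).ConfigureAwait(false);
                m_connected = true;
            }
        }
        finally
        {
            m_gate.Release();
        }
    }

    private Task ConnectAsync(CancellationToken cancellationToken)
    {
        // Placeholder for the real server round-trip.
        return Task.Delay(1, cancellationToken);
    }
}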
+ Boolean wroteMapping = LocationServerMapCache.EnsureServerMappingExists(m_fullyQualifiedUrl, m_instanceId, m_serviceOwner); + + if (wroteMapping) + { + if (includeServices && + (connectionData.LocationServiceData.ServiceDefinitions == null || + connectionData.LocationServiceData.ServiceDefinitions.Count == 0)) + { + // This is the rare, rare case where a new server exists at the same url + // that an old server used to (guids are different) and both servers had the same + // location service last change id. In that case, Connect would not have + // brought down any services. To fix this we need to query the services back + // down with -1 as our last change id + ConnectionData updatedConnectionData = await GetConnectionDataAsync(ConnectOptions.IncludeServices, -1, cancellationToken).ConfigureAwait(false); + locationServiceData = updatedConnectionData.LocationServiceData; + } + + m_locationDataCacheManager = new LocationCacheManager(m_instanceId, m_serviceOwner, m_baseUri); + } + + // update the location service cache if we tried to retireve location service data + m_locationDataCacheManager.WebApplicationRelativeDirectory = connectionData.WebApplicationRelativeDirectory; + if (locationServiceData != null) + { + m_locationDataCacheManager.LoadServicesData(locationServiceData, includeServices); + } + + // Set the connection data that we have retrieved + m_validConnectionData |= connectOptions; + + m_connectionMade = true; + } + + /// + /// Reset the connected state of the provider + /// + public Task DisconnectAsync( + CancellationToken cancellationToken = default(CancellationToken)) + { + m_connectionMade = false; + m_authenticatedIdentity = null; + m_authorizedIdentity = null; + return Task.FromResult(null); + } + + /// + /// Passed in on construction. The Uris for the server we are connecting to. + /// + private VssConnection m_connection; + private Uri m_baseUri; + private String m_fullyQualifiedUrl; + + /// + /// These are the values we are responsible for determining + /// + private Identity.Identity m_authenticatedIdentity; + private Identity.Identity m_authorizedIdentity; + private Guid m_instanceId; + private Guid m_serviceOwner; + + /// + /// These handle talking to the web service and dealing with connection data + /// + private LocationHttpClient m_locationClient; + private ConnectOptions m_validConnectionData; + private Boolean m_connectionMade; + + /// + /// This object manages the location data cache + /// + private LocationCacheManager m_locationDataCacheManager; + + /// + /// Cache of the resource locations + /// + private ApiResourceLocationCollection m_resourceLocations; + + private readonly AsyncLock m_connectionLock = new AsyncLock(); + + private async Task GetConnectionDataAsync(ConnectOptions connectOptions, int lastChangeId, CancellationToken cancellationToken) + { + int timeoutRetries = 1; + + while (true) + { + try + { + return await m_locationClient.GetConnectionDataAsync(connectOptions, lastChangeId, cancellationToken).ConfigureAwait(false); + } + catch(TimeoutException) when (timeoutRetries-- > 0) { } // Catch TimeoutException when we have retries remaining; otherwise, let it go. 
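// Sketch, not part of this patch: the retry just above uses an exception filter, so the decrement and
// the retry decision happen before any catch body runs, and the final TimeoutException propagates to
// the caller unmodified. The same idea as a small generic helper (hypothetical):
using System;
using System.Threading.Tasks;

internal static class TimeoutRetrySketch
{
    public static async Task<T> WithTimeoutRetryAsync<T>(Func<Task<T>> operation, int retries)
    {
        while (true)
        {
            try
            {
                return await operation().ConfigureAwait(false);
            }
            catch (TimeoutException) when (retries-- > 0)
            {
                // Swallow and loop; once retries are exhausted the filter evaluates to false and the
                // exception keeps unwinding with its original stack.
            }
        }
    }
}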
+ } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/IVssOAuthTokenParameterProvider.cs b/src/Sdk/WebApi/WebApi/OAuth/IVssOAuthTokenParameterProvider.cs new file mode 100644 index 00000000000..eb013eceaca --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/IVssOAuthTokenParameterProvider.cs @@ -0,0 +1,18 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.Services.OAuth +{ + /// + /// Represents an object which participates in setting parameters for an OAuth token request. + /// + public interface IVssOAuthTokenParameterProvider + { + /// + /// Sets applicable parameters on the provided parameters collection for a token request in which the provider + /// is a participant. + /// + /// The current set of parameters + void SetParameters(IDictionary parameters); + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthAccessToken.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthAccessToken.cs new file mode 100644 index 00000000000..530d4c4d947 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthAccessToken.cs @@ -0,0 +1,84 @@ +using System; +using GitHub.Services.Common; +using GitHub.Services.WebApi.Jwt; + +namespace GitHub.Services.OAuth +{ + /// + /// Provides encapsulation for opaque access tokens in OAuth token exchanges. + /// + public sealed class VssOAuthAccessToken : IssuedToken + { + /// + /// Initializes a new VssOAuthAccessToken instance with the specified value. + /// + /// The value of the access token, encoded as a string + public VssOAuthAccessToken(String value) + : this(value, DateTime.MaxValue) + { + } + + /// + /// Initializes a new VssOAuthAccessToken instance with the specified value and expiration time. + /// + /// The value of the access token, encoded as a string + /// The date and time when this token is no longer valid + public VssOAuthAccessToken( + String value, + DateTime validTo) + { + ArgumentUtility.CheckStringForNullOrEmpty(value, nameof(value)); + m_value = value; + m_validTo = validTo; + } + + /// + /// Initializes a new VssOAuthAccessToken instance with the specified JWT. + /// + /// The value of the access token, encoded as a JsonWebToken + public VssOAuthAccessToken(JsonWebToken value) + { + ArgumentUtility.CheckForNull(value, nameof(value)); + m_value = value.EncodedToken; + m_validTo = value.ValidTo; + } + + /// + /// Gets the date and time at which this token expires. + /// + public DateTime ValidTo + { + get + { + return m_validTo; + } + } + + /// + /// Gets the value of the current token. + /// + public String Value + { + get + { + return m_value; + } + } + + protected internal override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.OAuth; + } + } + + internal override void ApplyTo(IHttpRequest request) + { + request.Headers.SetValue(Common.Internal.HttpHeaders.Authorization, $"Bearer {m_value}"); + } + + private readonly String m_value; + private readonly DateTime m_validTo; + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthAccessTokenCredential.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthAccessTokenCredential.cs new file mode 100644 index 00000000000..03cf0c012ea --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthAccessTokenCredential.cs @@ -0,0 +1,85 @@ +using System; +using System.Net.Http; +using GitHub.Services.Common; +using GitHub.Services.WebApi.Jwt; + +namespace GitHub.Services.OAuth +{ + /// + /// Provides authentication for OAuth 2.0 access tokens issued without credentials. 
+ /// + public class VssOAuthAccessTokenCredential : FederatedCredential + { + /// + /// Initializes a new VssOAuthAccessTokenCredential instance with the specified access token encoded as + /// a string. + /// + /// The access token value encoded as a string + public VssOAuthAccessTokenCredential(String accessToken) + : this(new VssOAuthAccessToken(accessToken)) + { + } + + /// + /// Initializes a new VssOAuthAccessTokenCredential instance with the specified access token encoded as + /// a JWT. + /// + /// The access token value encoded as a JWT + public VssOAuthAccessTokenCredential(JsonWebToken accessToken) + : this(new VssOAuthAccessToken(accessToken)) + { + } + + /// + /// Initializes a new VssOAuthAccessTokenCredential instance with the specified access token. + /// + /// The access token + public VssOAuthAccessTokenCredential(VssOAuthAccessToken accessToken) + : base(accessToken) + { + } + + /// + /// Gets the type of the current credentials. + /// + public override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.OAuth; + } + } + + /// + /// Returns a no-op token provider. This credential does not provide token acquisition functionality. + /// + /// The server URL from which the challenge originated + /// The authentication challenge response message + /// A no-op token provider for supplying the access token + protected override IssuedTokenProvider OnCreateTokenProvider( + Uri serverUrl, + IHttpResponse response) + { + return new VssOAuthAccessTokenProvider(this, serverUrl, null); + } + + private class VssOAuthAccessTokenProvider : IssuedTokenProvider + { + public VssOAuthAccessTokenProvider( + IssuedTokenCredential credential, + Uri serverUrl, + Uri signInUrl) + : base(credential, serverUrl, signInUrl) + { + } + + public override Boolean GetTokenIsInteractive + { + get + { + return false; + } + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthClientCredential.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthClientCredential.cs new file mode 100644 index 00000000000..c70754d8dda --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthClientCredential.cs @@ -0,0 +1,78 @@ +using System; +using System.Collections.Generic; +using GitHub.Services.Common; + +namespace GitHub.Services.OAuth +{ + /// + /// Provides client credentials for proof of identity in OAuth 2.0 token exchanges. + /// + public abstract class VssOAuthClientCredential : IVssOAuthTokenParameterProvider, IDisposable + { + protected VssOAuthClientCredential( + VssOAuthClientCredentialType type, + String clientId) + { + ArgumentUtility.CheckStringForNullOrEmpty(clientId, nameof(clientId)); + + m_type = type; + m_clientId = clientId; + } + + /// + /// Gets the client identifier. + /// + public String ClientId + { + get + { + return m_clientId; + } + } + + /// + /// Gets the type of credentials for this instance. + /// + public VssOAuthClientCredentialType CredentialType + { + get + { + return m_type; + } + } + + /// + /// Disposes of managed resources referenced by the credentials. + /// + public void Dispose() + { + if (m_disposed) + { + return; + } + + m_disposed = true; + Dispose(true); + } + + protected virtual void Dispose(Boolean disposing) + { + } + + /// + /// When overridden in a derived class, the corresponding token request parameters should be set for the + /// credential type represented by the instance. 
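+        /// <remarks>
+        /// A sketch of what an override for a shared-secret client might set, using names from
+        /// VssOAuthConstants (the shipped credential types may implement this differently):
+        /// <code>
+        /// parameters[VssOAuthConstants.ClientId] = ClientId;
+        /// parameters[VssOAuthConstants.ClientSecret] = clientSecret;
+        /// </code>
+        /// </remarks>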
+ /// + /// The parameters to post to an authorization server + protected abstract void SetParameters(IDictionary parameters); + + void IVssOAuthTokenParameterProvider.SetParameters(IDictionary parameters) + { + SetParameters(parameters); + } + + private Boolean m_disposed; + private readonly String m_clientId; + private readonly VssOAuthClientCredentialType m_type; + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthClientCredentialType.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthClientCredentialType.cs new file mode 100644 index 00000000000..61837c2c74a --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthClientCredentialType.cs @@ -0,0 +1,18 @@ +namespace GitHub.Services.OAuth +{ + /// + /// Lists the supported client credential types + /// + public enum VssOAuthClientCredentialType + { + /// + /// Client Password for OAuth 2.0 Client Authentication + /// + Password, + + /// + /// JWT Bearer Token Profile for OAuth 2.0 Client Authentication + /// + JwtBearer, + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthClientCredentialsGrant.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthClientCredentialsGrant.cs new file mode 100644 index 00000000000..ceb06d3a2e2 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthClientCredentialsGrant.cs @@ -0,0 +1,24 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.Services.OAuth +{ + /// + /// Represents the client credentials grant for OAuth 2.0 token exchanges. + /// + public sealed class VssOAuthClientCredentialsGrant : VssOAuthGrant + { + /// + /// Initializes a new VssOAuthClientCredentials grant. + /// + public VssOAuthClientCredentialsGrant() + : base(VssOAuthGrantType.ClientCredentials) + { + } + + protected override void SetParameters(IDictionary parameters) + { + parameters[VssOAuthConstants.GrantType] = VssOAuthConstants.ClientCredentialsGrantType; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthConstants.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthConstants.cs new file mode 100644 index 00000000000..dbff7277607 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthConstants.cs @@ -0,0 +1,82 @@ +using System; + +namespace GitHub.Services.OAuth +{ + /// + /// Provides constants used in token exhanges for OAuth 2.0 + /// + public static class VssOAuthConstants + { + /// + /// Assertion parameter for token requests. + /// + public const String Assertion = "assertion"; + + /// + /// Authorization Code Grant for OAuth 2.0 + /// + public const String AuthorizationCodeGrantType = "authorization_code"; + + /// + /// Client Credentials Grant for OAuth 2.0 + /// + public const String ClientCredentialsGrantType = "client_credentials"; + + /// + /// Client ID parameter for client authentication. + /// + public const String ClientId = "client_id"; + + /// + /// Client secret parameter for client authentication. + /// + // [SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine")] -- The "Password" that follows is not valid anywhere + public const String ClientSecret = "client_secret"; + + /// + /// Client assertion parameter for client authentication. + /// + public const String ClientAssertion = "client_assertion"; + + /// + /// Client assertion type parameter for client authentication. + /// + public const String ClientAssertionType = "client_assertion_type"; + + /// + /// Code parameter for authorization code token requests. + /// + public const String Code = "code"; + + /// + /// Grant type parameter for token requests. 
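+        /// <remarks>
+        /// For the client credentials flow with a JWT bearer client assertion, the grant and the
+        /// client credential together populate the token request body roughly as follows, an
+        /// illustrative sketch mirroring the SetParameters implementations of
+        /// VssOAuthClientCredentialsGrant and VssOAuthJwtBearerClientCredential:
+        /// <code>
+        /// parameters[VssOAuthConstants.GrantType] = VssOAuthConstants.ClientCredentialsGrantType;
+        /// parameters[VssOAuthConstants.ClientAssertionType] = VssOAuthConstants.JwtBearerClientAssertionType;
+        /// parameters[VssOAuthConstants.ClientAssertion] = assertion.GetBearerToken().EncodedToken;
+        /// </code>
+        /// </remarks>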
+ /// + public const String GrantType = "grant_type"; + + /// + /// JWT Bearer Token Grant Type Profile for OAuth 2.0 + /// + /// + /// See http://tools.ietf.org/html/rfc7523 + /// + public const String JwtBearerAuthorizationGrantType = "urn:ietf:params:oauth:grant-type:jwt-bearer"; + + /// + /// JWT Bearer Token Profile for OAuth 2.0 Client Authentication + /// + /// + /// See http://tools.ietf.org/html/rfc7523 + /// + public const String JwtBearerClientAssertionType = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer"; + + /// + /// Refresh token parameter for token requests. + /// + public const String RefreshToken = "refresh_token"; + + /// + /// Refresh Token Grant for OAuth 2.0 + /// + public const String RefreshTokenGrantType = "refresh_token"; + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthCredential.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthCredential.cs new file mode 100644 index 00000000000..b5c4ea5c130 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthCredential.cs @@ -0,0 +1,133 @@ +using System; +using System.Linq; +using System.Net; +using System.Net.Http; +using GitHub.Services.Common; + +namespace GitHub.Services.OAuth +{ + /// + /// Provides authentication with a secure token service using the OAuth 2.0 protocol. + /// + public class VssOAuthCredential : FederatedCredential + { + /// + /// Initializes a new VssOAuthCredential instance with the specified authorization grant and client + /// credentials. + /// + /// The location of the token endpoint for the target authorization server + /// The grant to provide for the token exchange + /// The client credentials to provide for the token exchange + /// An optional set of token parameters which, if present, are sent in the request body of the token request + /// An optional access token which, if present, is used prior to requesting new tokens + public VssOAuthCredential( + Uri authorizationUrl, + VssOAuthGrant grant, + VssOAuthClientCredential clientCredential, + VssOAuthTokenParameters tokenParameters = null, + VssOAuthAccessToken accessToken = null) + : base(accessToken) + { + ArgumentUtility.CheckForNull(authorizationUrl, nameof(authorizationUrl)); + ArgumentUtility.CheckForNull(grant, nameof(grant)); + + m_authorizationUrl = authorizationUrl; + m_grant = grant; + m_tokenParameters = tokenParameters; + m_clientCredential = clientCredential; + } + + /// + /// Gets the type of issued token credential. + /// + public override VssCredentialsType CredentialType + { + get + { + return VssCredentialsType.OAuth; + } + } + + /// + /// Gets the authorization endpoint for this credential. + /// + public Uri AuthorizationUrl + { + get + { + return m_authorizationUrl; + } + } + + /// + /// Gets the grant for this credential. + /// + public VssOAuthGrant Grant + { + get + { + return m_grant; + } + } + + /// + /// Gets the client credentials for this credential. + /// + public VssOAuthClientCredential ClientCredential + { + get + { + return m_clientCredential; + } + } + + /// + /// Gets the set of additional token parameters configured for the credential. + /// + public VssOAuthTokenParameters TokenParameters + { + get + { + if (m_tokenParameters == null) + { + m_tokenParameters = new VssOAuthTokenParameters(); + } + return m_tokenParameters; + } + } + + /// + /// Determines whether or not the response reperesents an authentication challenge for the current credential. 
+ /// + /// The response to analyze + /// True if the web response indicates an authorization challenge; otherwise, false + public override Boolean IsAuthenticationChallenge(IHttpResponse webResponse) + { + if (webResponse == null) + { + return false; + } + + if (webResponse.StatusCode == HttpStatusCode.Found || + webResponse.StatusCode == HttpStatusCode.Unauthorized) + { + return webResponse.Headers.GetValues(Common.Internal.HttpHeaders.WwwAuthenticate).Any(x => x.IndexOf("Bearer", StringComparison.OrdinalIgnoreCase) >= 0); + } + + return false; + } + + protected override IssuedTokenProvider OnCreateTokenProvider( + Uri serverUrl, + IHttpResponse response) + { + return new VssOAuthTokenProvider(this, serverUrl); + } + + private VssOAuthTokenParameters m_tokenParameters; + + private readonly Uri m_authorizationUrl; + private readonly VssOAuthGrant m_grant; + private readonly VssOAuthClientCredential m_clientCredential; + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthExceptions.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthExceptions.cs new file mode 100644 index 00000000000..5ebf86f9a8d --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthExceptions.cs @@ -0,0 +1,94 @@ +using System; +using System.Runtime.Serialization; +using GitHub.Services.Common; + +namespace GitHub.Services.OAuth +{ + /// + /// Provides a base class for all OAuth exceptions. + /// + [Serializable] + public class VssOAuthException : VssServiceException + { + /// + /// Initializes a new VssOAuthException instance with the specified message. + /// + /// The error message that explains the reason for the exception + public VssOAuthException(String message) + : base(message) + { + } + + /// + /// Initializes a new VssOAuthException instance with the specified message. + /// + /// The error message that explains the reason for the exception + /// An object that describes the error that caused the current exception + public VssOAuthException(String message, Exception innerException) + : base(message, innerException) + { + } + + /// + /// Initializes a new VssOAuthException instance with serialized data. + /// + /// The SerializationInfo that holds the serialized object data about the exception being thrown + /// The StreamingContext that contains contextual information about the source or destination + protected VssOAuthException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + } + } + + /// + /// Thrown when an exception is encountered processing an OAuth 2.0 token request. + /// + [Serializable] + public class VssOAuthTokenRequestException : VssOAuthException + { + /// + /// Initializes a new VssOAuthTokenRequestException instance with the specified message. + /// + /// The error message that explains the reason for the exception + public VssOAuthTokenRequestException(String message) + : base(message) + { + } + + /// + /// Initializes a new VssOAuthTokenRequestException instance with the specified message. + /// + /// The error message that explains the reason for the exception + /// An object that describes the error that caused the current exception + public VssOAuthTokenRequestException(String message, Exception innerException) + : base(message, innerException) + { + } + + /// + /// Initializes a new VssOAuthException instance with serialized data. 
+ /// + /// The SerializationInfo that holds the serialized object data about the exception being thrown + /// The StreamingContext that contains contextual information about the source or destination + protected VssOAuthTokenRequestException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + this.Error = info.GetString("m_error"); + } + + /// + /// Gets or sets the OAuth 2.0 error code. See for potential values. + /// + public String Error + { + get; + set; + } + + public override void GetObjectData(SerializationInfo info, StreamingContext context) + { + base.GetObjectData(info, context); + info.AddValue("m_error", this.Error); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthGrant.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthGrant.cs new file mode 100644 index 00000000000..f27435297f8 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthGrant.cs @@ -0,0 +1,57 @@ +using System; +using System.Collections.Generic; + +namespace GitHub.Services.OAuth +{ + /// + /// Represents an authorization grant in an OAuth 2.0 token exchange. + /// + public abstract class VssOAuthGrant : IVssOAuthTokenParameterProvider + { + /// + /// Initializes a new VssOAuthGrant instance with the specified grant type. + /// + /// The type of authorization grant + protected VssOAuthGrant(VssOAuthGrantType grantType) + { + m_grantType = grantType; + } + + /// + /// Gets the type of authorization grant. + /// + public VssOAuthGrantType GrantType + { + get + { + return m_grantType; + } + } + + /// + /// Gets the client credentials authorization grant. + /// + public static VssOAuthClientCredentialsGrant ClientCredentials + { + get + { + return s_clientCredentialsGrant.Value; + } + } + + /// + /// When overridden in a derived class, the corresponding token request parameters should be set for the + /// grant type represented by the instance. 
+ /// + /// The parameters to post to an authorization server + protected abstract void SetParameters(IDictionary parameters); + + void IVssOAuthTokenParameterProvider.SetParameters(IDictionary parameters) + { + SetParameters(parameters); + } + + private readonly VssOAuthGrantType m_grantType; + private static readonly Lazy s_clientCredentialsGrant = new Lazy(() => new VssOAuthClientCredentialsGrant()); + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthGrantType.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthGrantType.cs new file mode 100644 index 00000000000..23834cb1537 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthGrantType.cs @@ -0,0 +1,28 @@ +namespace GitHub.Services.OAuth +{ + /// + /// Lists the supported authorization grant types + /// + public enum VssOAuthGrantType + { + /// + /// Authorization Code Grant for OAuth 2.0 + /// + AuthorizationCode, + + /// + /// Client Credentials Grant for OAuth 2.0 + /// + ClientCredentials, + + /// + /// JWT Bearer Token Grant Type Profile for OAuth 2.0 + /// + JwtBearer, + + /// + /// Refresh Token Grant for OAuth 2.0 + /// + RefreshToken, + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthJwtBearerAssertion.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthJwtBearerAssertion.cs new file mode 100644 index 00000000000..6e451beb212 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthJwtBearerAssertion.cs @@ -0,0 +1,149 @@ +using System; +using System.Collections.Generic; +using System.Security.Claims; +using GitHub.Services.WebApi; +using GitHub.Services.WebApi.Jwt; + +namespace GitHub.Services.OAuth +{ + /// + /// Represents a bearer token assertion for either JWT Bearer Token Profile for OAuth 2.0 Client Authentication + /// or JWT Bearer Token Grant Type Profile for OAuth 2.0. + /// + public class VssOAuthJwtBearerAssertion + { + /// + /// Initializes a new VssOAuthJwtBearerAssertion with the specified token as the assertion value. + /// + /// The JsonWebToken instance representing the assertion + internal VssOAuthJwtBearerAssertion(JsonWebToken bearerToken) + { + m_bearerToken = bearerToken; + } + + /// + /// Initializes a new VssOAuthJwtBearerAssertion with the specified issuer, subject, audience, + /// and signing credentials for generating a bearer token. + /// + /// The iss claim for the bearer token + /// The sub claim for the bearer token + /// The aud claim for the bearer token + /// The credentials used to sign the bearer token + public VssOAuthJwtBearerAssertion( + String issuer, + String subject, + String audience, + VssSigningCredentials signingCredentials) + : this(issuer, subject, audience, null, signingCredentials) + { + } + + /// + /// Initializes a new VssOAuthJwtBearerAssertion with the specified issuer, subject, audience, + /// and signing credentials for generating a bearer token. + /// + /// The iss claim for the bearer token + /// The sub claim for the bearer token + /// The aud claim for the bearer token + /// An optional list of additional claims to provide with the bearer token + /// The credentials used to sign the bearer token + public VssOAuthJwtBearerAssertion( + String issuer, + String subject, + String audience, + IList additionalClaims, + VssSigningCredentials signingCredentials) + { + m_issuer = issuer; + m_subject = subject; + m_audience = audience; + m_signingCredentials = signingCredentials; + + if (additionalClaims != null) + { + this.additionalClaims = new List(additionalClaims); + } + } + + /// + /// Gets the issuer (iss claim) for the credentials. 
+ /// + public String Issuer + { + get + { + return m_issuer; + } + } + + /// + /// Gets the subject (sub claim) for the credentials. + /// + public String Subject + { + get + { + return m_subject; + } + } + + /// + /// Gets the audience (aud claim) for the credentials. + /// + public String Audience + { + get + { + return m_audience; + } + } + + /// + /// Gets a list of additional claims provided with the credentials. + /// + public IList AdditionalClaims + { + get + { + if (additionalClaims == null) + { + additionalClaims = new List(); + } + return additionalClaims; + } + } + + /// + /// Gets a JsonWebToken instance based on the values provided to the assertion. + /// + /// A signed JsonWebToken instance for presentation as a bearer token + public JsonWebToken GetBearerToken() + { + if (m_bearerToken != null) + { + return m_bearerToken; + } + else + { + var additionalClaims = new List(this.AdditionalClaims ?? new Claim[0]); + if (!String.IsNullOrEmpty(m_subject)) + { + additionalClaims.Add(new Claim(JsonWebTokenClaims.Subject, m_subject)); + } + + additionalClaims.Add(new Claim(JsonWebTokenClaims.TokenId, Guid.NewGuid().ToString())); + + var nowUtc = DateTime.UtcNow; + return JsonWebToken.Create(m_issuer, m_audience, nowUtc, nowUtc.Add(BearerTokenLifetime), additionalClaims, m_signingCredentials); + } + } + + private List additionalClaims; + private readonly String m_issuer; + private readonly String m_subject; + private readonly String m_audience; + private readonly JsonWebToken m_bearerToken; + private readonly VssSigningCredentials m_signingCredentials; + private static readonly TimeSpan BearerTokenLifetime = TimeSpan.FromMinutes(5); + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthJwtBearerClientCredential.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthJwtBearerClientCredential.cs new file mode 100644 index 00000000000..970fb72df5b --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthJwtBearerClientCredential.cs @@ -0,0 +1,62 @@ +using System; +using System.Collections.Generic; +using GitHub.Services.Common; +using GitHub.Services.WebApi; + +namespace GitHub.Services.OAuth +{ + /// + /// Implements the JWT Bearer Token Profile for OAuth 2.0 Client Authentication. + /// + public sealed class VssOAuthJwtBearerClientCredential : VssOAuthClientCredential + { + /// + /// Initializes a new VssOAuthJwtBearerClientCredential with the specified client identifier and audience. The + /// credential will be used for the JWT Bearer Token Profile for Client Authentication as a client assertion. + /// + /// The client identifier issued by the authorization server + /// The target audience for the bearer assertion. This is usually the authorization URL + /// The signing credentials for proof of client identity + public VssOAuthJwtBearerClientCredential( + String clientId, + String audience, + VssSigningCredentials signingCredentials) + : this(clientId, new VssOAuthJwtBearerAssertion(clientId, clientId, audience, signingCredentials)) + { + } + + /// + /// Initializes a new VssOAuthJwtBearerClientCredential with the specified JWT bearer assertion. + /// + /// The client identifier issued by the authorization server + /// The client assertion for proof of identity + public VssOAuthJwtBearerClientCredential( + String clientId, + VssOAuthJwtBearerAssertion assertion) + : base(VssOAuthClientCredentialType.JwtBearer, clientId) + { + ArgumentUtility.CheckForNull(assertion, nameof(assertion)); + + m_assertion = assertion; + } + + /// + /// Gets the jwt-bearer assertion for issuing tokens. 
+ /// + public VssOAuthJwtBearerAssertion Assertion + { + get + { + return m_assertion; + } + } + + protected override void SetParameters(IDictionary parameters) + { + parameters[VssOAuthConstants.ClientAssertionType] = VssOAuthConstants.JwtBearerClientAssertionType; + parameters[VssOAuthConstants.ClientAssertion] = m_assertion.GetBearerToken().EncodedToken; + } + + private readonly VssOAuthJwtBearerAssertion m_assertion; + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenHttpClient.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenHttpClient.cs new file mode 100644 index 00000000000..8f0158f18ed --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenHttpClient.cs @@ -0,0 +1,216 @@ +using System; +using System.Collections.Generic; +using System.Net; +using System.Net.Http; +using System.Net.Http.Formatting; +using System.Net.Http.Headers; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.Common.Diagnostics; +using GitHub.Services.WebApi; + +namespace GitHub.Services.OAuth +{ + /// + /// Provides a method for exchanging tokens with a secure token service which supports OAuth 2.0. + /// + public class VssOAuthTokenHttpClient + { + /// + /// Initializes a new VssOAuthTokenHttpClient using the specified authorization URL as the token + /// exchange endpoint. Requests for tokens will be posted to the provided URL. + /// + /// The endpoint used to acquire new tokens from the secure token service + public VssOAuthTokenHttpClient(Uri authorizationUrl) + { + ArgumentUtility.CheckForNull(authorizationUrl, nameof(authorizationUrl)); + m_authorizationUrl = authorizationUrl; + m_formatter = new VssJsonMediaTypeFormatter(); + } + + /// + /// Gets the authorization URL for the secure token service. + /// + public Uri AuthorizationUrl + { + get + { + return m_authorizationUrl; + } + } + + /// + /// Performs a token exchange using the specified token request. + /// + /// The token request + /// A token for signalling cancellation + /// A Task<VssOAuthTokenResponse> which may be used to track progress of the token request + public Task GetTokenAsync( + VssOAuthTokenRequest request, + CancellationToken cancellationToken = default(CancellationToken)) + { + ArgumentUtility.CheckForNull(request, nameof(request)); + return GetTokenAsync(request.Grant, request.ClientCredential, request.Parameters, cancellationToken); + } + + /// + /// Performs a token exchange using the specified authorization grant and client credentials. 
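+        /// <example>
+        /// A minimal usage sketch; the endpoint, client id, and signing credentials below are
+        /// placeholders rather than values used by the runner:
+        /// <code>
+        /// var endpoint = new Uri("https://token.example.test/oauth2/token");
+        /// var clientCredential = new VssOAuthJwtBearerClientCredential("my-client-id", endpoint.AbsoluteUri, signingCredentials);
+        /// var tokenClient = new VssOAuthTokenHttpClient(endpoint);
+        /// var response = await tokenClient.GetTokenAsync(VssOAuthGrant.ClientCredentials, clientCredential);
+        /// var accessToken = new VssOAuthAccessToken(response.AccessToken, DateTime.UtcNow.AddSeconds(response.ExpiresIn));
+        /// </code>
+        /// </example>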
+ /// + /// The authorization grant for the token request + /// The credentials to present to the secure token service as proof of identity + /// An collection of additional parameters to provide for the token request + /// A token for signalling cancellation + /// A Task<VssOAuthTokenResponse> which may be used to track progress of the token request + public async Task GetTokenAsync( + VssOAuthGrant grant, + VssOAuthClientCredential credential, + VssOAuthTokenParameters tokenParameters = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + VssTraceActivity traceActivity = VssTraceActivity.Current; + using (HttpClient client = new HttpClient(CreateMessageHandler(this.AuthorizationUrl))) + { + var requestMessage = new HttpRequestMessage(HttpMethod.Post, this.AuthorizationUrl); + requestMessage.Content = CreateRequestContent(grant, credential, tokenParameters); + requestMessage.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); + +#if NETSTANDARD + if (VssClientHttpRequestSettings.Default.UseHttp11) + { + requestMessage.Version = HttpVersion.Version11; + } +#endif + + foreach (var headerVal in VssClientHttpRequestSettings.Default.UserAgent) + { + if (!requestMessage.Headers.UserAgent.Contains(headerVal)) + { + requestMessage.Headers.UserAgent.Add(headerVal); + } + } + + using (var response = await client.SendAsync(requestMessage, cancellationToken: cancellationToken).ConfigureAwait(false)) + { + string correlationId = "Unknown"; + if (response.Headers.TryGetValues("x-ms-request-id", out IEnumerable requestIds)) + { + correlationId = string.Join(",", requestIds); + } + VssHttpEventSource.Log.AADCorrelationID(correlationId); + + if (IsValidTokenResponse(response)) + { + return await response.Content.ReadAsAsync(new[] { m_formatter }, cancellationToken).ConfigureAwait(false); + } + else + { + var responseContent = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + throw new VssServiceResponseException(response.StatusCode, responseContent, null); + } + } + } + } + + private static Boolean IsValidTokenResponse(HttpResponseMessage response) + { + return response.StatusCode == HttpStatusCode.OK || (response.StatusCode == HttpStatusCode.BadRequest && IsJsonResponse(response)); + } + + private static HttpMessageHandler CreateMessageHandler(Uri requestUri) + { + var retryOptions = new VssHttpRetryOptions() + { + RetryableStatusCodes = + { + HttpStatusCode.InternalServerError, + VssNetworkHelper.TooManyRequests, + }, + }; + +#if !NETSTANDARD + WebRequestHandler messageHandler = new WebRequestHandler() + { + UseDefaultCredentials = false + }; +#else + HttpClientHandler messageHandler = new HttpClientHandler() + { + UseDefaultCredentials = false + }; +#endif + + // Inherit proxy setting from VssHttpMessageHandler + if (VssHttpMessageHandler.DefaultWebProxy != null) + { + messageHandler.Proxy = VssHttpMessageHandler.DefaultWebProxy; + messageHandler.UseProxy = true; + } + + if (requestUri.Scheme.Equals("https", StringComparison.OrdinalIgnoreCase) && + VssClientHttpRequestSettings.Default.ClientCertificateManager != null && + VssClientHttpRequestSettings.Default.ClientCertificateManager.ClientCertificates != null && + VssClientHttpRequestSettings.Default.ClientCertificateManager.ClientCertificates.Count > 0) + { + messageHandler.ClientCertificates.AddRange(VssClientHttpRequestSettings.Default.ClientCertificateManager.ClientCertificates); + } + + if (requestUri.Scheme.Equals("https", StringComparison.OrdinalIgnoreCase) && + 
VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback != null) + { +#if !NETSTANDARD + messageHandler.ServerCertificateValidationCallback = VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback; +#else + messageHandler.ServerCertificateCustomValidationCallback = VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback; +#endif + } + + return new VssHttpRetryMessageHandler(retryOptions, messageHandler); + } + + private static HttpContent CreateRequestContent(params IVssOAuthTokenParameterProvider[] parameterProviders) + { + var parameters = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var parameterProvider in parameterProviders) + { + if (parameterProvider != null) + { + parameterProvider.SetParameters(parameters); + } + } + + return new FormUrlEncodedContent(parameters); + } + + private static Boolean HasContent(HttpResponseMessage response) + { + if (response != null && + response.StatusCode != HttpStatusCode.NoContent && + response.Content != null && + response.Content.Headers != null && + response.Content.Headers.ContentLength.HasValue && + response.Content.Headers.ContentLength != 0) + { + return true; + } + + return false; + } + + private static Boolean IsJsonResponse(HttpResponseMessage response) + { + if (HasContent(response) && + response.Content.Headers != null && + response.Content.Headers.ContentType != null && + !String.IsNullOrEmpty(response.Content.Headers.ContentType.MediaType)) + { + return String.Equals("application/json", response.Content.Headers.ContentType.MediaType, StringComparison.OrdinalIgnoreCase); + } + + return false; + } + + private readonly Uri m_authorizationUrl; + private readonly MediaTypeFormatter m_formatter; + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenParameters.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenParameters.cs new file mode 100644 index 00000000000..e214121ec3e --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenParameters.cs @@ -0,0 +1,115 @@ +using System; +using System.Collections.Generic; +using Newtonsoft.Json; + +namespace GitHub.Services.OAuth +{ + /// + /// Provides additional parameters for OAuth 2.0 token requests. Existing values may be removed by setting the + /// property to null. Properties with no value should use an empty string. + /// + [JsonDictionary] + public class VssOAuthTokenParameters : Dictionary, IVssOAuthTokenParameterProvider + { + /// + /// Initializes a new VssOAuthTokenParameters instance with no additional parameters. + /// + public VssOAuthTokenParameters() + : base(StringComparer.OrdinalIgnoreCase) + { + } + + /// + /// Gets or sets the redirect_uri parameter, specifying the redirection endpoint for the user-agent after + /// the authorization server completes interaction with the resource owner. + /// + public String RedirectUri + { + get + { + return GetValueOrDefault("redirect_uri"); + } + set + { + RemoveOrSetValue("redirect_uri", value); + } + } + + /// + /// Gets or sets the resource parameter, indicating the target resource for the token request. + /// + /// + /// At the time of writing, the specification for this parameter may be found at the link below. + /// https://datatracker.ietf.org/doc/draft-campbell-oauth-resource-indicators/?include_text=1 + /// + public String Resource + { + get + { + return GetValueOrDefault("resource"); + } + set + { + RemoveOrSetValue("resource", value); + } + } + + /// + /// Gets or sets the scope parameter, indicating the scope of the access request. 
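+        /// <example>
+        /// Assigning null removes the parameter and any other value sets it; a short sketch
+        /// (the scope string is illustrative only):
+        /// <code>
+        /// var parameters = new VssOAuthTokenParameters();
+        /// parameters.Scope = "read write";   // adds scope to the token request
+        /// parameters.Scope = null;           // removes the scope parameter again
+        /// </code>
+        /// </example>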
+ /// + public String Scope + { + get + { + return GetValueOrDefault("scope"); + } + set + { + RemoveOrSetValue("scope", value); + } + } + + + /// + /// Gets a string representation of the additional parameters as a JSON string. + /// + /// A string representation of the parameters which are set + public override String ToString() + { + return JsonConvert.SerializeObject(this); + } + + void IVssOAuthTokenParameterProvider.SetParameters(IDictionary parameters) + { + foreach (var parameter in this) + { + parameters[parameter.Key] = parameter.Value; + } + } + + private String GetValueOrDefault(String key) + { + String value; + if (!TryGetValue(key, out value)) + { + value = null; + } + + return value; + } + + private void RemoveOrSetValue( + String key, + String value) + { + if (value == null) + { + this.Remove(key); + } + else + { + this[key] = value; + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenProvider.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenProvider.cs new file mode 100644 index 00000000000..e910b259f6b --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenProvider.cs @@ -0,0 +1,214 @@ +using System; +using System.Globalization; +using System.IO; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.Common.Diagnostics; +using GitHub.Services.WebApi; +using Newtonsoft.Json; + +namespace GitHub.Services.OAuth +{ + /// + /// Provides functionality to acquire access tokens for OAuth 2.0. + /// + public class VssOAuthTokenProvider : IssuedTokenProvider + { + /// + /// Initializes a new VssOAuthTokenProvider instance for the specified credential. + /// + /// The VssOAuthCredential instance which owns the token provider + /// The resource server which issued the authentication challenge + public VssOAuthTokenProvider( + VssOAuthCredential credential, + Uri serverUrl) + : this(credential, serverUrl, credential.AuthorizationUrl, credential.Grant, credential.ClientCredential, credential.TokenParameters) + { + m_credential = credential; + } + + /// + /// Initializes a new VssOAuthTokenProvider instance for the specified credential. + /// + /// The VssOAuthCredential instance which owns the token provider + /// The resource server which issued the authentication challenge + /// The authorization server token endpoint + /// The authorization grant to use for token requests + /// The client credentials to use for token requests + /// Additional parameters to include with token requests + protected VssOAuthTokenProvider( + IssuedTokenCredential credential, + Uri serverUrl, + Uri authorizationUrl, + VssOAuthGrant grant, + VssOAuthClientCredential clientCrential, + VssOAuthTokenParameters tokenParameters) + : base(credential, serverUrl, authorizationUrl) + { + m_grant = grant; + m_tokenParameters = tokenParameters; + m_clientCredential = clientCrential; + } + + /// + /// Gets the authorization grant for the token provider. + /// + public VssOAuthGrant Grant + { + get + { + return m_grant; + } + } + + /// + /// Gets the client credentials for the token provider. + /// + public VssOAuthClientCredential ClientCredential + { + get + { + return m_clientCredential; + } + } + + /// + /// Gets the additional parameters configured for the token provider. + /// + public VssOAuthTokenParameters TokenParameters + { + get + { + return m_tokenParameters; + } + } + + /// + /// Gets a value indicating whether or not this token provider requires interactivity. 
+ /// + public override Boolean GetTokenIsInteractive + { + get + { + return false; + } + } + + protected override String AuthenticationParameter + { + get + { + if (this.ClientCredential == null) + { + return null; + } + else + { + return String.Format(CultureInfo.InvariantCulture, "client_id=\"{0}\" audience=\"{1}\"", this.ClientCredential.ClientId, this.SignInUrl.AbsoluteUri); + } + } + } + + protected override String AuthenticationScheme + { + get + { + return "Bearer"; + } + } + + /// + /// Issues a token request to the configured secure token service. On success, the access token issued by the + /// token service is returned to the caller + /// + /// If applicable, the previous token which is now considered invalid + /// A token used for signalling cancellation + /// A Task&lgt;IssuedToken> for tracking the progress of the token request + protected override async Task OnGetTokenAsync( + IssuedToken failedToken, + CancellationToken cancellationToken) + { + if (this.SignInUrl == null || + this.Grant == null || + this.ClientCredential == null) + { + return null; + } + + IssuedToken issuedToken = null; + var traceActivity = VssTraceActivity.Current; + try + { + var tokenHttpClient = new VssOAuthTokenHttpClient(this.SignInUrl); + var tokenResponse = await tokenHttpClient.GetTokenAsync(this.Grant, this.ClientCredential, this.TokenParameters, cancellationToken).ConfigureAwait(false); + if (!String.IsNullOrEmpty(tokenResponse.AccessToken)) + { + // Construct a new access token based on the response, including the expiration time so we know + // when to refresh the token. + issuedToken = CreateIssuedToken(tokenResponse); + + if (!String.IsNullOrEmpty(tokenResponse.RefreshToken)) + { + // TODO: How should this flow be handled? Refresh Token is a credential change which is not + // the same thing as access token storage + } + } + else if (!String.IsNullOrEmpty(tokenResponse.Error)) + { + // Raise a new exception describing the underlying authentication error + throw new VssOAuthTokenRequestException(tokenResponse.ErrorDescription) + { + Error = tokenResponse.Error, + }; + } + else + { + // If the error property isn't set, but we didn't get an access token, then it's not + // clear what the issue is. In this case just trace the response and fall through with + // a null access token return value. 
+ var sb = new StringBuilder(); + var serializer = JsonSerializer.Create(s_traceSettings.Value); + using (var sr = new StringWriter(sb)) + { + serializer.Serialize(sr, tokenResponse); + } + + VssHttpEventSource.Log.AuthenticationError(traceActivity, this, sb.ToString()); + } + } + catch (VssServiceResponseException ex) + { + VssHttpEventSource.Log.AuthenticationError(traceActivity, this, ex); + } + + return issuedToken; + } + + protected virtual IssuedToken CreateIssuedToken(VssOAuthTokenResponse tokenResponse) + { + if (tokenResponse.ExpiresIn > 0) + { + return new VssOAuthAccessToken(tokenResponse.AccessToken, DateTime.UtcNow.AddSeconds(tokenResponse.ExpiresIn)); + } + else + { + return new VssOAuthAccessToken(tokenResponse.AccessToken); + } + } + + private static JsonSerializerSettings CreateTraceSettings() + { + var settings = new VssJsonMediaTypeFormatter().SerializerSettings; + settings.Formatting = Formatting.Indented; + return settings; + } + + private readonly VssOAuthGrant m_grant; + private readonly VssOAuthCredential m_credential; + private readonly VssOAuthTokenParameters m_tokenParameters; + private readonly VssOAuthClientCredential m_clientCredential; + private static readonly Lazy s_traceSettings = new Lazy(CreateTraceSettings); + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenRequest.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenRequest.cs new file mode 100644 index 00000000000..3e21ab7e3c7 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenRequest.cs @@ -0,0 +1,232 @@ +using System; +using System.Collections.Generic; +using System.Net.Http.Formatting; +using GitHub.Services.Common; +using GitHub.Services.WebApi.Jwt; + +namespace GitHub.Services.OAuth +{ + /// + /// Encapsulates the data used in an OAuth 2.0 token request. + /// + public class VssOAuthTokenRequest + { + /// + /// Initializes a new VssOAuthTokenRequest instance with the specified grant and client credential. + /// + /// The authorization grant to use for the token request + /// The client credential to use for the token request + public VssOAuthTokenRequest( + VssOAuthGrant grant, + VssOAuthClientCredential clientCredential) + : this(grant, clientCredential, null) + { + } + + /// + /// Initializes a new VssOAuthTokenRequest instance with the specified grant and client credential. + /// Additional parameters specified by the token parameters will be provided in the token request. + /// + /// The authorization grant to use for the token request + /// The client credential to use for the token request + /// An optional set of additional parameters for the token request + public VssOAuthTokenRequest( + VssOAuthGrant grant, + VssOAuthClientCredential clientCredential, + VssOAuthTokenParameters tokenParameters) + { + ArgumentUtility.CheckForNull(grant, nameof(grant)); + + m_grant = grant; + m_clientCredential = clientCredential; + m_tokenParameters = tokenParameters; + } + + /// + /// Gets the authorization grant for this token request. + /// + public VssOAuthGrant Grant + { + get + { + return m_grant; + } + } + + /// + /// Gets the client credential for this token request. Depending on the grant ype used, this value may be null. + /// + public VssOAuthClientCredential ClientCredential + { + get + { + return m_clientCredential; + } + } + + /// + /// Gets the optional set of additional parameters for this token request. 
+ /// + public VssOAuthTokenParameters Parameters + { + get + { + if (m_tokenParameters == null) + { + m_tokenParameters = new VssOAuthTokenParameters(); + } + return m_tokenParameters; + } + } + +#if !NETSTANDARD + /// + /// Initializes a new VssOAuthTokenRequest instance from the specified form input. + /// + /// The input which should be parsed into a token request + /// A new VssOAuthTokenRequest instance representative of the provided form input + public static VssOAuthTokenRequest FromFormInput(FormDataCollection form) + { + var parsedParameters = new HashSet(); + var grant = CreateGrantFromFormInput(form, parsedParameters); + var clientCredential = CreateClientCredentialFromFormInput(form, parsedParameters); + + var tokenParameters = new VssOAuthTokenParameters(); + foreach (var parameter in form) + { + // Only include parameters in the extended set if we didn't already read them in the grant and + // credentials parsing logic. + if (parsedParameters.Add(parameter.Key)) + { + tokenParameters.Add(parameter.Key, parameter.Value); + } + } + + return new VssOAuthTokenRequest(grant, clientCredential, tokenParameters); + } + + private static VssOAuthGrant CreateGrantFromFormInput( + FormDataCollection form, + ISet parsedParameters) + { + ArgumentUtility.CheckForNull(form, nameof(form)); + + var grantType = GetRequiredValue(form, VssOAuthConstants.GrantType, VssOAuthErrorCodes.InvalidRequest); + switch (grantType) + { + case VssOAuthConstants.AuthorizationCodeGrantType: + var codeValue = GetRequiredValue(form, VssOAuthConstants.Code, VssOAuthErrorCodes.InvalidRequest); + parsedParameters.Add(VssOAuthConstants.Code); + return new VssOAuthCodeGrant(codeValue); + + case VssOAuthConstants.ClientCredentialsGrantType: + return VssOAuthGrant.ClientCredentials; + + case VssOAuthConstants.JwtBearerAuthorizationGrantType: + var assertionValue = GetRequiredValue(form, VssOAuthConstants.Assertion, VssOAuthErrorCodes.InvalidRequest); + parsedParameters.Add(VssOAuthConstants.Assertion); + var assertion = JsonWebToken.Create(assertionValue); + return new VssOAuthJwtBearerGrant(new VssOAuthJwtBearerAssertion(assertion)); + + case VssOAuthConstants.RefreshTokenGrantType: + var refreshTokenValue = GetRequiredValue(form, VssOAuthConstants.RefreshToken, VssOAuthErrorCodes.InvalidRequest); + parsedParameters.Add(VssOAuthConstants.RefreshToken); + return new VssOAuthRefreshTokenGrant(refreshTokenValue); + + default: + // The OAuth 2.0 spec explicitly allows only ASCII characters in the error description + throw new VssOAuthTokenRequestException($"{VssOAuthConstants.GrantType} {grantType} is not supported") + { + Error = VssOAuthErrorCodes.UnsupportedGrantType, + }; + } + } + + private static VssOAuthClientCredential CreateClientCredentialFromFormInput( + FormDataCollection form, + ISet parsedParameters) + { + // https://tools.ietf.org/html/rfc7521#section-4.2 + // See the above document for rules on processing client assertions w.r.t other credential types. 
+ var clientId = form[VssOAuthConstants.ClientId]; + var clientAssertionType = form[VssOAuthConstants.ClientAssertionType]; + if (clientAssertionType == VssOAuthConstants.JwtBearerClientAssertionType) + { + var clientAssertionValue = GetRequiredValue(form, VssOAuthConstants.ClientAssertion, VssOAuthErrorCodes.InvalidClient); + JsonWebToken clientAssertion = null; + try + { + clientAssertion = JsonWebToken.Create(clientAssertionValue); + } + catch (JsonWebTokenDeserializationException ex) + { + // The OAuth 2.0 spec explicitly allows only ASCII characters in the error description + throw new VssOAuthTokenRequestException($"{VssOAuthConstants.ClientAssertion} is not in the correct format", ex) + { + Error = VssOAuthErrorCodes.InvalidClient + }; + } + + // If the client id parameter is present when client assertions are used then it must match exactly + // the subject claim of the token. + if (!String.IsNullOrEmpty(clientId)) + { + if (clientId.Equals(clientAssertion.Subject, StringComparison.Ordinal)) + { + parsedParameters.Add(VssOAuthConstants.ClientId); + } + else + { + // The OAuth 2.0 spec explicitly allows only ASCII characters in the error description + throw new VssOAuthTokenRequestException($"{VssOAuthConstants.ClientId} {clientId} does not match {VssOAuthConstants.ClientAssertion} subject {clientAssertion.Subject}") + { + Error = VssOAuthErrorCodes.InvalidClient, + }; + } + } + else + { + clientId = clientAssertion.Subject; + } + + parsedParameters.Add(VssOAuthConstants.ClientAssertion); + parsedParameters.Add(VssOAuthConstants.ClientAssertionType); + return new VssOAuthJwtBearerClientCredential(clientId, new VssOAuthJwtBearerAssertion(clientAssertion)); + } + + if (!String.IsNullOrEmpty(clientId)) + { + parsedParameters.Add(VssOAuthConstants.ClientId); + + var clientSecret = form[VssOAuthConstants.ClientSecret]; + if (!String.IsNullOrEmpty(clientSecret)) + { + parsedParameters.Add(VssOAuthConstants.ClientSecret); + return new VssOAuthPasswordClientCredential(clientId, clientSecret); + } + } + + return null; + } + + private static String GetRequiredValue( + FormDataCollection form, + String parameterName, + String error) + { + var value = form[parameterName]; + if (String.IsNullOrEmpty(value)) + { + throw new VssOAuthTokenRequestException($"{parameterName} is required") { Error = error }; + } + + return value; + } +#endif + + private VssOAuthTokenParameters m_tokenParameters; + + private readonly VssOAuthGrant m_grant; + private readonly VssOAuthClientCredential m_clientCredential; + } +} diff --git a/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenResponse.cs b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenResponse.cs new file mode 100644 index 00000000000..fab331726cb --- /dev/null +++ b/src/Sdk/WebApi/WebApi/OAuth/VssOAuthTokenResponse.cs @@ -0,0 +1,89 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.OAuth +{ + /// + /// Provides the properties for the response of a token exchange in OAuth 2.0 + /// + [DataContract] + public class VssOAuthTokenResponse + { + /// + /// Initializes a new VssOAuthTokenResponse instance with empty values. + /// + public VssOAuthTokenResponse() + { + } + + /// + /// Gets or sets the access token for the response. + /// + [DataMember(Name = "access_token", EmitDefaultValue = false)] + public String AccessToken + { + get; + set; + } + + /// + /// Gets or sets the error for the response. 
+ /// + [DataMember(Name = "error", EmitDefaultValue = false)] + public String Error + { + get; + set; + } + + /// + /// Gets or sets the error description for the response. + /// + [DataMember(Name = "errordescription", EmitDefaultValue = false)] + public String ErrorDescription + { + get; + set; + } + + /// + /// Gets or sets a value indicating the remaining duration, in seconds, of the access token. + /// + [DataMember(Name = "expires_in", EmitDefaultValue = false)] + public Int32 ExpiresIn + { + get; + set; + } + + /// + /// Gets or sets the refresh token for the response, if applicable. + /// + [DataMember(Name = "refresh_token", EmitDefaultValue = false)] + public String RefreshToken + { + get; + set; + } + + /// + /// Gets or sets the scope or scopes of access for the provided access token. + /// + [DataMember(Name = "scope", EmitDefaultValue = false)] + public String Scope + { + get; + set; + } + + /// + /// Gets or sets the type of token for the response. + /// + [DataMember(Name = "token_type", EmitDefaultValue = false)] + public String TokenType + { + get; + set; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Profile/ProfileArgumentValidation.cs b/src/Sdk/WebApi/WebApi/Profile/ProfileArgumentValidation.cs new file mode 100644 index 00000000000..ff27f4257d8 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Profile/ProfileArgumentValidation.cs @@ -0,0 +1,40 @@ +using GitHub.Services.Common; +using System; + +namespace GitHub.Services.Profile +{ + public static class ProfileArgumentValidation + { + public static void ValidateAttributeName(string attributeName) + { + ArgumentUtility.CheckStringForNullOrEmpty(attributeName, "attributeName", true); + ArgumentUtility.CheckStringForInvalidCharacters(attributeName, "attributeName"); + if (attributeName.Contains(Semicolon)) + { + throw new ArgumentException("Attribute name cannot contain the character ';'", attributeName); + } + } + + public static void ValidateContainerName(string containerName) + { + ArgumentUtility.CheckStringForNullOrEmpty(containerName, "containerName", true); + ArgumentUtility.CheckStringForInvalidCharacters(containerName, "containerName"); + if (containerName.Contains(Semicolon)) + { + throw new ArgumentException("Container name cannot contain the character ';'", containerName); + } + } + + public static void ValidateApplicationContainerName(string containerName) + { + ValidateContainerName(containerName); + if (VssStringComparer.AttributesDescriptor.Compare(containerName, Profile.CoreContainerName) == 0) + { + throw new ArgumentException( + string.Format("The container name '{0}' is reserved. 
Please specify a valid application container name", Profile.CoreContainerName), "containerName"); + } + } + + private const string Semicolon = ";"; + } +} diff --git a/src/Sdk/WebApi/WebApi/ProxyAuthenticationRequiredException.cs b/src/Sdk/WebApi/WebApi/ProxyAuthenticationRequiredException.cs new file mode 100644 index 00000000000..8d5261719ec --- /dev/null +++ b/src/Sdk/WebApi/WebApi/ProxyAuthenticationRequiredException.cs @@ -0,0 +1,29 @@ +using System; +using GitHub.Services.Common; + +namespace GitHub.Services.WebApi +{ + [ExceptionMapping("0.0", "3.0", "ProxyAuthenticationRequiredException", "GitHub.Services.WebApi.ProxyAuthenticationRequiredException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ProxyAuthenticationRequiredException : VssException + { + public ProxyAuthenticationRequiredException() + : base(WebApiResources.ProxyAuthenticationRequired()) + { + this.HelpLink = HelpLinkUrl; + } + + public ProxyAuthenticationRequiredException(string message, Exception innerException) + : base(message, innerException) + { + this.HelpLink = HelpLinkUrl; + } + + public ProxyAuthenticationRequiredException(string message) + : base(message) + { + this.HelpLink = HelpLinkUrl; + } + + private const string HelpLinkUrl = ""; + } +} diff --git a/src/Sdk/WebApi/WebApi/PublicAccessJsonConverter.cs b/src/Sdk/WebApi/WebApi/PublicAccessJsonConverter.cs new file mode 100644 index 00000000000..9a39a06e510 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/PublicAccessJsonConverter.cs @@ -0,0 +1,71 @@ +using System; +using System.ComponentModel; +using System.Reflection; +using Newtonsoft.Json; + +namespace GitHub.Services.WebApi +{ + /// + /// A JsonConverter that sets the value of a property or field to its type's default value if the value is not null + /// and the ShouldClearValueFunction property is set and the ShouldClearValueFunction function returns true. This + /// can only be used on properties and fields. The converter will fail if used on at the class level. + /// + /// The type of the property or field. + [EditorBrowsable(EditorBrowsableState.Never)] + public class DefaultValueOnPublicAccessJsonConverter : PublicAccessJsonConverter + { + public override object GetDefaultValue() + { + return default(T); + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class PublicAccessJsonConverter : PublicAccessJsonConverter + { + public PublicAccessJsonConverter() + { + if (typeof(T) == typeof(bool)) + { + // We are not supporting boolean types. This is because the converter does not get invoked in case of default values, + // therefore you can infer that a boolean type is false if the value does not exist in the json, and true if it does + // exist in the json even though the value would be set to false. 
+ throw new ArgumentException($"The {nameof(PublicAccessJsonConverter)} does not support Boolean types, because the value can be inferred from the existance or non existance of the property in the json."); + } + } + + public override bool CanConvert(Type objectType) + { + return typeof(T).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public abstract class PublicAccessJsonConverter : JsonConverter + { + public abstract object GetDefaultValue(); + + public override bool CanRead => false; + + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + throw new NotImplementedException(); + } + + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + if (value != null && ShouldClearValueFunction != null && ShouldClearValueFunction()) + { + serializer.Serialize(writer, GetDefaultValue()); + } + else + { + // this is the default serialization. This will fail if the converter is used at the class level rather than + // at the member level, because the default serialization will reinvoke this converter resulting in an exception. + serializer.Serialize(writer, value); + } + } + + internal static Func ShouldClearValueFunction { get; set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/ResourceLocationIds.cs b/src/Sdk/WebApi/WebApi/ResourceLocationIds.cs new file mode 100644 index 00000000000..f4ccaa58bc7 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/ResourceLocationIds.cs @@ -0,0 +1,1511 @@ +using System; +using GitHub.Services.Common; + +//each resource (aka "controller") needs its own guid +//for the location service. Defined here so we can use it on the client +//to look it up. +namespace GitHub.Services.Identity +{ + public static class IdentityResourceIds + { + public const string AreaId = "8A3D49B8-91F0-46EF-B33D-DDA338C25DB3"; + public const string AreaName = "IMS"; + public static readonly Guid Identity = new Guid("{28010C54-D0C0-4C89-A5B0-1C9E188B9FB7}"); + public const string IdentityResource = "Identities"; + public static readonly Guid IdentityBatch = new Guid("{299E50DF-FE45-4D3A-8B5B-A5836FAC74DC}"); + public const string IdentityBatchResource = "IdentityBatch"; + public static readonly Guid Group = new Guid("{5966283B-4196-4D57-9211-1B68F41EC1C2}"); + public const string GroupResource = "Groups"; + public static readonly Guid Scope = new Guid("{4E11E2BF-1E79-4EB5-8F34-A6337BD0DE38}"); + public const string ScopeResource = "Scopes"; + public const string MemberLocationIdString = "8BA35978-138E-41F8-8963-7B1EA2C5F775"; + public static readonly Guid Member = new Guid(MemberLocationIdString); + public const string MemberResource = "Members"; + public const string MemberOfLocationIdString = "22865B02-9E4A-479E-9E18-E35B8803B8A0"; + public static readonly Guid MemberOf = new Guid(MemberOfLocationIdString); + public const string MemberOfResource = "MembersOf"; + public static readonly Guid IdentityDebug = new Guid("{C6B859A5-248C-448A-B770-D373C6E165BD}"); + public const string IdentityDebugResource = "IdentitiesDebug"; + public static readonly Guid IdentitySnapshot = new Guid("{D56223DF-8CCD-45C9-89B4-EDDF692400D7}"); + public const string IdentitySnapshotResource = "IdentitySnapshot"; + public static readonly Guid IdentitySelf = new Guid("{4BB02B5B-C120-4BE2-B68E-21F7C50A4B82}"); + public const string IdentitySelfResource = "me"; + public static readonly Guid SignoutToken = new 
Guid("{BE39E83C-7529-45E9-9C67-0410885880DA}"); + public const string SignoutTokenResource = "SignoutToken"; + public static readonly Guid SignedInToken = new Guid("{6074FF18-AAAD-4ABB-A41E-5C75F6178057}"); + public const string SignedInTokenResource = "SignedInToken"; + public static readonly Guid IdentityTenant = new Guid("{5F0A1723-2E2C-4C31-8CAE-002D01BDD592}"); + public const string IdentityTenantResource = "tenant"; + public static readonly Guid FrameworkIdentity = new Guid("{DD55F0EB-6EA2-4FE4-9EBE-919E7DD1DFB4}"); + public const string FrameworkIdentityResource = "Identity"; + public static readonly Guid IdentityMaxSequenceId = new Guid("{E4A70778-CB2C-4E85-B7CC-3F3C7AE2D408}"); + public const string IdentityMaxSequenceIdResource = "MaxSequenceId"; + public static readonly Guid Claims = new Guid("{90ddfe71-171c-446c-bf3b-b597cd562afd}"); + public const string ClaimsResource = "Claims"; + public static readonly Guid Rights = new Guid("{05F0AD48-6AEF-42A8-9D9B-9AB650875A5D}"); + public const string RightsResource = "Rights"; + public static readonly Guid RightsBatch = new Guid("{908B4EDC-4C6A-41E8-88ED-07A1F01A9A59}"); + + public static readonly Guid DescriptorsResourceLocationId = new Guid("A230389A-94F2-496C-839F-C929787496DD"); + public const string DescriptorsResourceName = "descriptors"; + + public static readonly Guid SwapLocationId = new Guid("{7A2338C2-39D8-4906-9889-E8BC9C52CBB2}"); + public const string SwapResource = "Swap"; + } +} + +namespace GitHub.Services.Graph +{ + public class GraphResourceIds + { + public const string AreaName = "Graph"; + public const string AreaId = "BB1E7EC9-E901-4B68-999A-DE7012B920F8"; + public static readonly Guid AreaIdGuid = new Guid(AreaId); + + public class Groups + { + public const string GroupsResourceName = "Groups"; + public static readonly Guid GroupsResourceLocationId = new Guid("EBBE6AF8-0B91-4C13-8CF1-777C14858188"); + } + + public class Descriptors + { + public static readonly Guid DescriptorsResourceLocationId = new Guid("048AEE0A-7072-4CDE-AB73-7AF77B1E0B4E"); + public const string DescriptorsResourceName = "Descriptors"; + } + + public class Memberships + { + public static readonly Guid MembershipsResourceLocationId = new Guid("3FD2E6CA-FB30-443A-B579-95B19ED0934C"); + public const string MembershipsResourceName = "Memberships"; + + public static readonly Guid MembershipsBatchResourceLocationId = new Guid("E34B6394-6B30-4435-94A9-409A5EEF3E31"); + public const string MembershipsBatchResourceName = "MembershipsBatch"; + + public static readonly Guid MembershipStatesResourceLocationId = new Guid("1FFE5C94-1144-4191-907B-D0211CAD36A8"); + public const string MembershipStatesResourceName = "MembershipStates"; + } + + public class Scopes + { + public const string ScopesResourceName = "Scopes"; + public static readonly Guid ScopesResourceLocationId = new Guid("21B5FEA7-2513-41D0-AF78-B8CDB0F328BB"); + } + + public class SubjectLookup + { + public const string SubjectLookupResourceName = "SubjectLookup"; + public static readonly Guid SubjectLookupResourceLocationId = new Guid("4DD4D168-11F2-48C4-83E8-756FA0DE027C"); + } + + public class Users + { + public const string UsersResourceName = "Users"; + public static readonly Guid UsersResourceLocationId = new Guid("005E26EC-6B77-4E4F-A986-B3827BF241F5"); + + public class ProviderInfo + { + public const string ProviderInfoResourceName = "ProviderInfo"; + public static readonly Guid ProviderInfoResourceLocationId = new Guid("1E377995-6FA2-4588-BD64-930186ABDCFA"); + } + } + + public class 
Subjects + { + public const string SubjectsResourceName = "Subjects"; + public static readonly Guid SubjectsResourceLocationId = new Guid("1D44A2AC-4F8A-459E-83C2-1C92626FB9C6"); + + public class Avatars + { + public const string AvatarsResourceName = "Avatars"; + public static readonly Guid AvatarsResourceLocationId = new Guid("801EAF9C-0585-4BE8-9CDB-B0EFA074DE91"); + } + } + + public class Members + { + public const string MembersResourceName = "Members"; + + public const string MembersByDescriptorResourceLocationIdString = "B9AF63A7-5DB6-4AF8-AAE7-387F775EA9C6"; + public static readonly Guid MembersByDescriptorResourceLocationId = new Guid(MembersByDescriptorResourceLocationIdString); + + public const string MembersResourceLocationIdString = "8B9ECDB2-B752-485A-8418-CC15CF12EE07"; + public static readonly Guid MembersResourceLocationId = new Guid(MembersResourceLocationIdString); + } + + public class CachePolicies + { + public const string CachePoliciesResourceName = "CachePolicies"; + public static readonly Guid CachePoliciesResourceLocationId = new Guid("BEB83272-B415-48E8-AC1E-A9B805760739"); + } + + public class MemberLookup + { + public const string MemberLookupResourceName = "MemberLookup"; + public static readonly Guid MemberLookupResourceLocationId = new Guid("3D74D524-AE3D-4D24-A9A7-F8A5CF82347A"); + } + + public class StorageKeys + { + public const string StorageKeysResourceName = "StorageKeys"; + public static readonly Guid StorageKeysResourceLocationId = new Guid("EB85F8CC-F0F6-4264-A5B1-FFE2E4D4801F"); + } + + public class MembershipTraversals + { + public const string MembershipTraversalsResourceName = "MembershipTraversals"; + public static readonly Guid MembershipTraversalsLocationId = new Guid("5D59D874-746F-4F9B-9459-0E571F1DED8C"); + } + + public class FederatedProviderData + { + public static readonly Guid FederatedProviderDataResourceLocationId = new Guid("5DCD28D6-632D-477F-AC6B-398EA9FC2F71"); + public const string FederatedProviderDataResourceName = "FederatedProviderData"; + } + } +} + +namespace GitHub.GraphProfile.WebApi +{ + [GenerateAllConstants] + public static class GraphProfileResourceIds + { + public const String AreaId = "4E40F190-2E3F-4D9F-8331-C7788E833080"; + public const String AreaName = "GraphProfile"; + public static readonly Guid AreaIdGuid = new Guid(AreaId); + + public class MemberAvatars + { + public const String MemberAvatarsResourceName = "MemberAvatars"; + public static readonly Guid MemberAvatarsLocationId = new Guid("D443431F-B341-42E4-85CF-A5B0D639ED8F"); + } + } +} + +namespace GitHub.Services.OAuth +{ + public static class TokenOAuth2ResourceIds + { + public const string AreaName = "tokenoauth2"; + public const string AreaId = "01c5c153-8bc0-4f07-912a-ec4dc386076d"; + + public const string TokenResource = "token"; + public static readonly Guid Token = new Guid("{bbc63806-e448-4e88-8c57-0af77747a323}"); + } + + public static class OAuth2ResourceIds + { + public const string AreaName = "oauth2"; + public const string AreaId = "585028FE-17D8-49E2-9A1B-EFB4D8502156"; + + public const string TokenResource = "token"; + public static readonly Guid Token = new Guid("{CD12634C-1D0F-4A19-9FF3-17875B764932}"); + } +} + +namespace GitHub.Services.Tokens +{ + public static class TokenAuthResourceIds + { + public const string AreaName = "TokenAuth"; + public const string AreaId = "c5a2d98b-985c-432e-825e-3c6971edae87"; + + public const string AuthorizationResource = "Authorizations"; + public static readonly Guid Authorization = new 
Guid("7d7ddc0d-60bd-4978-a0b5-295cb099a400"); + + public const string HostAuthorizationResource = "HostAuthorization"; + public static readonly Guid HostAuthorizeId = new Guid("{817d2b46-1507-4efe-be2b-adccf17ffd3b}"); + + public const string RegistrationResource = "Registration"; + public static readonly Guid Registration = new Guid("{522ad1a0-389d-4c6f-90da-b145fd2d3ad8}"); + + public const string RegistrationSecretResource = "RegistrationSecret"; + public static readonly Guid RegistrationSecret = new Guid("{74896548-9cdd-4315-8aeb-9ecd88fceb21}"); + } + + public static class TokenIssueResourceIds + { + public const string AreaName = "TokenIssue"; + public const string AreaId = "6b10046c-829d-44d2-8a1d-02f88f4ff032"; + + public const string AccessTokenResource = "AccessTokens"; + public static readonly Guid AccessToken = new Guid("{24691e90-c8bd-42c0-8aae-71b7511a797a}"); + + public const string SessionTokenResource = "SessionTokens"; + public static readonly Guid SessionToken = new Guid("{98e25729-952a-4b1f-ac89-7ca8b9803261}"); + + public const string AadUserTokenResource = "AadUserTokens"; + public static readonly Guid AadUserToken = new Guid("{4cbff9ec-7f69-4d40-a82e-cca1e8545d01}"); + + public const string AadAppTokenResource = "AadAppTokens"; + public static readonly Guid AadAppToken = new Guid("{f15de83d-4b1d-4387-90aa-e72c0ce10b3e}"); + + public const string AppSessionTokenResource = "AppSessionTokens"; + public static readonly Guid AppSessionToken = new Guid("{325f73ea-e978-4ad1-8f3a-c30b39000a17}"); + + public const string AppTokenPairResource = "AppTokenPairs"; + public static readonly Guid AppTokenPair = new Guid("{9030cb81-c1fd-4f3b-9910-c90eb559b830}"); + } + + public static class TokenTokenExpirationResourceIds + { + public const string AreaName = "TokenExpiration"; + public const string AreaId = "339c63b0-d305-4fd3-958a-22b8e0eb6fc2"; + + public const string TokenExpirationResource = "Token"; + public static readonly Guid TokenExpiration = new Guid("{e04f61f2-a03d-4aec-8b0f-6e8511fe5adc}"); + } + + public static class DelegatedAuthResourceIds + { + public const string AreaName = "DelegatedAuth"; + public const string AreaId = "A0848FA1-3593-4AEC-949C-694C73F4C4CE"; + + public const string AuthorizationResource = "Authorizations"; + public static readonly Guid Authorization = new Guid("EFBF6E0C-1150-43FD-B869-7E2B04FC0D09"); + + public const string HostAuthorizationResource = "HostAuthorization"; + public static readonly Guid HostAuthorizeId = new Guid("{7372FDD9-238C-467C-B0F2-995F4BFE0D94}"); + + public const string RegistrationResource = "Registration"; + public static readonly Guid Registration = new Guid("{909CD090-3005-480D-A1B4-220B76CB0AFE}"); + + public const string RegistrationSecretResource = "RegistrationSecret"; + public static readonly Guid RegistrationSecret = new Guid("{F37E5023-DFBE-490E-9E40-7B7FB6B67887}"); + } + + public static class TokenResourceIds + { + public const string AreaName = "Token"; + public const string AreaId = "0AD75E84-88AE-4325-84B5-EBB30910283C"; + + public const string AccessTokenResource = "AccessTokens"; + public static readonly Guid AccessToken = new Guid("{94C2BCFB-BF10-4B41-AC01-738122D6B5E0}"); + + public const string SessionTokenResource = "SessionTokens"; + public static readonly Guid SessionToken = new Guid("{ADA996BC-8C18-4193-B20C-CD41B13F5B4D}"); + + public const string AadUserTokenResource = "AadUserTokens"; + public static readonly Guid AadUserToken = new Guid("{6A8B6E50-FDA9-4AC1-9536-678C28BE2F7D}"); + + public const 
string AadAppTokenResource = "AadAppTokens"; + public static readonly Guid AadAppToken = new Guid("{11B3E525-35D3-4373-8985-EA72887427DB}"); + + public const string AppSessionTokenResource = "AppSessionTokens"; + public static readonly Guid AppSessionToken = new Guid("{B743B207-6DC5-457B-B1DF-B9B63D640F0B}"); + + public const string AppTokenPairResource = "AppTokenPairs"; + public static readonly Guid AppTokenPair = new Guid("{9CE3C96A-34A2-41AF-807D-205DA73F227B}"); + } + + public static class PropertyCacheResourceIds + { + public const string AreaName = "Cache"; + public const string AreaId = "0B808CEB-EF49-4C5E-9483-600A4ECF1224"; + + public const string PropertyCacheResource = "Properties"; + public static readonly Guid PropertyCache = new Guid("{656342EB-AE7D-4FF2-802F-19C6E35B0FE6}"); + } +} + +namespace GitHub.Services.TokenAdmin.Client +{ + public static class TokenAdminResourceIds + { + public const string AreaName = "TokenAdmin"; + public const string AreaId = "af68438b-ed04-4407-9eb6-f1dbae3f922e"; + + public const string PersonalAccessTokensResource = "PersonalAccessTokens"; + public static readonly Guid PersonalAccessTokensLocationId = new Guid("{af68438b-ed04-4407-9eb6-f1dbae3f922e}"); + + public const string RevocationsResource = "Revocations"; + public static readonly Guid RevocationsLocationId = new Guid("{a9c08b2c-5466-4e22-8626-1ff304ffdf0f}"); + + public const string RevocationRulesResource = "RevocationRules"; + public static readonly Guid RevocationRulesLocationId = new Guid("{ee4afb16-e7ab-4ed8-9d4b-4ef3e78f97e4}"); + } +} + +namespace GitHub.Services.Tokens.TokenAdmin.Client +{ + public static class TokenAdministrationResourceIds + { + public const string TokenAreaName = "TokenAdministration"; + public const string TokenAreaId = "95935461-9E54-44BD-B9FB-04F4DD05D640"; + + public const string TokenPersonalAccessTokensResource = "TokenPersonalAccessTokens"; + public static readonly Guid TokenPersonalAccessTokensLocationId = new Guid("{1BB7DB14-87C5-4762-BF77-A70AD34A9AB3}"); + + public const string TokenRevocationsResource = "TokenRevocations"; + public static readonly Guid TokenRevocationsLocationId = new Guid("{A2E4520B-1CC8-4526-871E-F3A8F865F221}"); + + public const string TokenListGlobalIdentities = "TokenListGlobalIdentities"; + public static readonly Guid TokenListGlobalIdentitiesId = new Guid("{30D3A12B-66C3-4669-B016-ECB0706C8D0F}"); + } +} + +namespace GitHub.Services.Identity.Client +{ + public static class PropertyCacheResourceIds + { + public const string AreaName = "Cache"; + public const string AreaId = "0B808CEB-EF49-4C5E-9483-600A4ECF1224"; + + public const string PropertyCacheResource = "Properties"; + public static readonly Guid PropertyCache = new Guid("{656342EB-AE7D-4FF2-802F-19C6E35B0FE6}"); + } +} + +namespace GitHub.Services.Security +{ + public static class LocationResourceIds + { + public const string SecurityBackingStoreArea = "SBS"; + + public const string SecurityBackingStoreNamespaceResource = "SBSNamespace"; + public static readonly Guid SecurityBackingStoreNamespace = new Guid("049929B0-79E1-4AD5-A548-9E192D5C049E"); + + public const string SecurityBackingStoreAclStoreResource = "SBSAclStore"; + public static readonly Guid SecurityBackingStoreAclStore = new Guid("D9DA18E4-274B-4DD4-B09D-B8B931AF3826"); + + public const string SecurityBackingStoreAclsResource = "SBSAcls"; + public static readonly Guid SecurityBackingStoreAcls = new Guid("3F95720D-2EF6-47CC-B5D7-733561D13EB9"); + + public const string SecurityBackingStoreAcesResource = 
"SBSAces"; + public static readonly Guid SecurityBackingStoreAces = new Guid("AB821A2B-F383-4C72-8274-8425ED30835D"); + + public const string SecurityBackingStoreInheritResource = "SBSInherit"; + public static readonly Guid SecurityBackingStoreInherit = new Guid("25DCFFD2-9F2A-4109-B4CC-000F8472107D"); + + public const string SecurityBackingStoreTokensResource = "SBSTokens"; + public static readonly Guid SecurityBackingStoreTokens = new Guid("466ECEAD-D7F1-447C-8BC1-52C22592B98E"); + + public const string SecurityServiceArea = "Security"; + + public const string SecurityPermissionsResource = "Permissions"; + public static readonly Guid SecurityPermissions = new Guid("DD3B8BD6-C7FC-4CBD-929A-933D9C011C9D"); + + public const string SecurityPermissionEvaluationBatchResource = "PermissionEvaluationBatch"; + public static readonly Guid SecurityPermissionEvaluationBatch = new Guid("CF1FAA59-1B63-4448-BF04-13D981A46F5D"); + + public const string SecurityAccessControlEntriesResource = "AccessControlEntries"; + public static readonly Guid SecurityAccessControlEntries = new Guid("AC08C8FF-4323-4B08-AF90-BCD018D380CE"); + + public const string SecurityAccessControlListsResource = "AccessControlLists"; + public static readonly Guid SecurityAccessControlLists = new Guid("18A2AD18-7571-46AE-BEC7-0C7DA1495885"); + + public const string SecurityNamespacesResource = "SecurityNamespaces"; + public static readonly Guid SecurityNamespaces = new Guid("CE7B9F95-FDE9-4BE8-A86D-83B366F0B87A"); + } +} + +namespace GitHub.Services.Account +{ + public static class AccountResourceIds + { + public const string RegionArea = "Region"; + + public const string AreaId = "0D55247A-1C47-4462-9B1F-5E2125590EE6"; + + public const string AccountServiceArea = "Account"; + public static readonly Guid Account = new Guid("{229A6A53-B428-4FFB-A835-E8F36B5B4B1E}"); + public const string AccountResource = "Accounts"; + + public static readonly Guid AccountUserId = new Guid("{DFA3B963-C8BB-4CAF-BCAC-5C066B3B5793}"); + public const string AccountUserResource = "Users"; + + public const string HostMappingsResource = "HostMappings"; + public static readonly Guid HostsLocationid = new Guid("{DC2B7A91-2350-487B-9192-8099F28D6576}"); + + public static readonly Guid AccountTenantId = new Guid("{C58B3989-1E17-4A18-9925-67186FE66833}"); + public const string AccountTenantResource = "Tenant"; + + public static readonly Guid AccountRegionLocationId = new Guid("642A93C7-8385-4D63-A5A5-20D044FE504F"); + public const string AccountRegionResource = "Regions"; + + public static readonly Guid AccountNameAvailabilityid = new Guid("65DD1DC5-53FE-4C67-9B4E-0EC3E2539998"); + public const string AccountNameAvailabilityResource = "Availability"; + + public static readonly Guid AccountSettingsid = new Guid("4E012DD4-F8E1-485D-9BB3-C50D83C5B71B"); + public const string AccountSettingsResource = "Settings"; + } +} + +namespace GitHub.Services.ClientNotification +{ + public static class ClientNotificationResourceIds + { + public const string AreaId = "C2845FF0-342A-4059-A831-AA7A5BF00FF0"; + public const string AreaName = "ClientNotification"; + + public const string SubscriptionsResource = "Subscriptions"; + public static readonly Guid SubscriptionsLocationid = new Guid("E037C69C-5AD1-4B26-B340-51C18035516F"); + + public const string NotificationsResource = "Notifications"; + public static readonly Guid NotificationsLocationid = new Guid("7F325780-EAD9-4C90-ACD1-2ECF621CE348"); + } +} + +namespace GitHub.Services.Licensing +{ + public static class 
LicensingResourceIds + { + public const string AreaId = "C73A23A1-59BB-458C-8CE3-02C83215E015"; + public const string AreaName = "Licensing"; + + public const string CertificateResource = "Certificate"; + public static readonly Guid CertificateLocationid = new Guid("2E0DBCE7-A327-4BC0-A291-056139393F6D"); + + public const string ClientRightsResource = "ClientRights"; + public static readonly Guid ClientRightsLocationid = new Guid("643C72DA-EAEE-4163-9F07-D748EF5C2A0C"); + + public const string MsdnResource = "Msdn"; + public const string MsdnPresenceLocationIdString = "69522C3F-EECC-48D0-B333-F69FFB8FA6CC"; + public static readonly Guid MsdnPresenceLocationId = new Guid(MsdnPresenceLocationIdString); + public const string MsdnEntitlementsLocationIdString = "1cc6137e-12d5-4d44-a4f2-765006c9e85d"; + public static readonly Guid MsdnEntitlementsLocationId = new Guid(MsdnEntitlementsLocationIdString); + + public const string ExtensionRightsResource = "ExtensionRights"; + public static readonly Guid ExtensionRightsLocationId = new Guid("5F1DBE21-F748-47C7-B5FD-3770C8BC2C08"); + + public const string ExtensionLicenseResource = "ExtensionRegistration"; + public static readonly Guid ExtensionLicenseLocationId = new Guid("004A420A-7BEF-4B7F-8A50-22975D2067CC"); + + public const string UsageRightsResource = "UsageRights"; + public static readonly Guid UsageRightsLocationid = new Guid("D09AC573-58FE-4948-AF97-793DB40A7E16"); + + public const string ServiceRightsResource = "ServiceRights"; + public static readonly Guid ServiceRightsLocationid = new Guid("78ED2F48-D449-412D-8772-E4E97317B7BE"); + + public const string UsageResource = "Usage"; + public static readonly Guid UsageLocationid = new Guid("D3266B87-D395-4E91-97A5-0215B81A0B7D"); + + public const string EntitlementsResource = "Entitlements"; + public const string EntitlementsBatchResource = "EntitlementsBatch"; + public const string ExtensionsAssignedToAccountResource = "AccountAssignedExtensions"; + public const string ExtensionsAssignedToAccountLocationIdString = "01BCE8D3-C130-480F-A332-474AE3F6662E"; + public static readonly Guid ExtensionsAssignedToAccountLocationId = new Guid(ExtensionsAssignedToAccountLocationIdString); + + public const string EntitlementsLocationIdString = "EA37BE6F-8CD7-48DD-983D-2B72D6E3DA0F"; + public static readonly Guid EntitlementsLocationid = new Guid(EntitlementsLocationIdString); + public const string UserEntitlementsLocationIdString = "6490E566-B299-49A7-A4E4-28749752581F"; + public static readonly Guid UserEntitlementsLocationId = new Guid(UserEntitlementsLocationIdString); + + public const string UserEntitlementsBatchLocationIdString = "CC3A0130-78AD-4A00-B1CA-49BEF42F4656"; + public static readonly Guid UserEntitlementsBatchLocationId = new Guid(UserEntitlementsBatchLocationIdString); + + public const string CurrentUserEntitlementsLocationIdString = "C01E9FD5-0D8C-4D5E-9A68-734BD8DA6A38"; + public static readonly Guid CurrentUserEntitlementsLocationId = new Guid(CurrentUserEntitlementsLocationIdString); + public const string AssignAvailableEntitlementsLocationIdString = "C01E9FD5-0D8C-4D5E-9A68-734BD8DA6A38"; + public static readonly Guid AssignAvailableEntitlementsLocationId = new Guid(AssignAvailableEntitlementsLocationIdString); + + public const string ExtensionEntitlementsResource = "ExtensionEntitlements"; + public const string UserExtensionEntitlementsLocationIdString = "8CEC75EA-044F-4245-AB0D-A82DAFCC85EA"; + public static readonly Guid UserExtensionEntitlementsLocationId = new 
Guid(UserExtensionEntitlementsLocationIdString); + public const string ExtensionEntitlementsLocationIdString = "5434F182-7F32-4135-8326-9340D887C08A"; + public static readonly Guid ExtensionEntitlementsLocationId = new Guid(ExtensionEntitlementsLocationIdString); + + public const string TransferIdentitiesExtensionsResource = "TransferIdentitiesExtensions"; + public const string TransferIdentitiesExtensionsLocationIdString = "DA46FE26-DBB6-41D9-9D6B-86BF47E4E444"; + public static readonly Guid TransferIdentitiesExtensionsLocationId = + new Guid(TransferIdentitiesExtensionsLocationIdString); + + public const string ExtensionEntitlementsBatchResource = "ExtensionEntitlementsBatch"; + public const string UsersBatchExtensionEntitlementsLocationIdString = "1D42DDC2-3E7D-4DAA-A0EB-E12C1DBD7C72"; + public static readonly Guid UsersBatchExtensionEntitlementsLocationId = new Guid(UsersBatchExtensionEntitlementsLocationIdString); + + public const string LicensingRightsResource = "LicensingRights"; + public const string LicensingRightsLocationIdString = "8671B016-FA74-4C88-B693-83BBB88C2264"; + public static readonly Guid LicensingRightsLocationId = new Guid(LicensingRightsLocationIdString); + + public const string LicensingSettingsResource = "Settings"; + public const string LicensingSettingsLocationIdString = "6BA7740F-A387-4D74-B71A-969A9F2B49FB"; + public static readonly Guid LicensingSettingsLocationId = new Guid(LicensingSettingsLocationIdString); + } + + public static class LicensingResourceVersions + { + public const int AccountRightsResourcePreviewVersion = 1; + public const int CertificateResourcePreviewVersion = 1; + public const int ClientRightsResourcePreviewVersion = 1; + public const int UsageRightsResourcePreviewVersion = 1; + public const int ServiceRightsResourceRtmVersion = 1; + public const int AccountUsageResourceRtmVersion = 1; + public const int EntitlementResourceRtmVersion = 1; + public const int EntitlementsBatchResourcePreviewVersion = 1; + public const int LicensingRightsResourceRtmVersion = 1; + public const int MsdnResourceRtmVersion = 1; + public const int ExtensionRightsResourceRtmVersion = 1; + public const int ExtensionLicenseResourceRtmVersion = 1; + public const int ExtensionEntitlementsResourceRtmVersion = 1; + public const int ExtensionEntitlementsBatchResourceRtmVersion = 1; + public const int ExtensionEntitlementsBatch2ResourceRtmVersion = 2; + public const int TransferExtensionsForIdentitiesRtmVersion = 1; + public const int LicensingSettingsResourceRtmVersion = 1; + } +} + +namespace GitHub.Services.GroupLicensingRule +{ + public static class LicensingRuleResourceIds + { + public const string AreaId = "4F9A6C65-A750-4DE3-96D3-E4BCCF3A39B0"; + public const string AreaName = "LicensingRule"; + + public static class GroupLicensingRules + { + public const string GroupLicensingRulesResourceName = "GroupLicensingRules"; + public const string GroupLicensingRuleLocationIdString = "1DAE9AF4-C85D-411B-B0C1-A46AFAEA1986"; + public static readonly Guid GroupLicensingRuleLocationId = new Guid(GroupLicensingRuleLocationIdString); + } + + public static class GroupLicensingRulesLookup + { + public const string GroupLicensingRulesLookupResourceName = "GroupLicensingRulesLookup"; + public const string GroupLicensingRulesLookupResourceLocationIdString = "6282B958-792B-4F26-B5C8-6D035E02289F"; + public static readonly Guid GroupLicensingRulesLookupResourceLocationId = new Guid(GroupLicensingRulesLookupResourceLocationIdString); + } + + public static class 
GroupLicensingRulesUserApplication + { + public const string GroupLicensingRulesUserApplicationResourceName = "GroupLicensingRulesUserApplication"; + public const string GroupLicensingRulesUserApplicationResourceLocationIdString = "74A9DE62-9AFC-4A60-A6D9-F7C65E028619"; + public static readonly Guid GroupLicensingRulesUserApplicationResourceLocationId = new Guid(GroupLicensingRulesUserApplicationResourceLocationIdString); + } + + public static class GroupLicensingRulesApplication + { + public const string GroupLicensingRulesApplicationResourceName = "GroupLicensingRulesApplication"; + public const string GroupLicensingRulesApplicationResourceLocationIdString = "14602853-288e-4711-a613-c3f27ffce285"; + public static readonly Guid GroupLicensingRulesApplicationResourceLocationId = new Guid(GroupLicensingRulesApplicationResourceLocationIdString); + } + public static class GroupLicensingRulesApplicationStatus + { + public const string GroupLicensingRulesApplicationStatusResourceName = "GroupLicensingRulesApplicationStatus"; + public const string GroupLicensingRulesApplicationStatusResourceLocationIdString = "8953c613-d07f-43d3-a7bd-e9b66f960839"; + public static readonly Guid GroupLicensingRulesApplicationStatusResourceLocationId = new Guid(GroupLicensingRulesApplicationStatusResourceLocationIdString); + } + + public static class GroupLicensingRulesEvaluationLog + { + public const string GroupLicensingRulesEvaluationLogResourceName = "GroupLicensingRulesEvaluationLog"; + public const string GroupLicensingRulesEvaluationLogResourceLocationIdString = "C3C87024-5143-4631-94CE-CB2338B04BBC"; + public static readonly Guid GroupLicensingRulesEvaluationLogResourceLocationId = new Guid(GroupLicensingRulesEvaluationLogResourceLocationIdString); + } + } + + public static class GroupLicensingResourceVersions + { + public const int GroupLicensingRulesResourceVersion = 1; + public const int GroupLicensingRulesLookupResourceVersion = 1; + } +} + +namespace GitHub.Services.Invitation +{ + public static class InvitationResourceVersions + { + public const int InvitationAPIVersion = 1; + } + + public static class InvitationResourceIds + { + public const string AreaId = "287A6D53-7DC8-4618-8D57-6945B848A4AD"; + public const string AreaName = "Invitation"; + + public const string InvitationsResourceName = "Invitations"; + public const string InvitationsLocationIdString = "BC7CA053-E204-435B-A143-6240BA8A93BF"; + public static readonly Guid InvitationsLocationId = new Guid(InvitationsLocationIdString); + } +} + +namespace GitHub.Services.Compliance +{ + public static class ComplianceResourceVersions + { + public const int AccountRightsResourceVersion = 1; + public const int ConfigurationResourceVersion = 1; + public const int ValidationResourceVersion = 1; + } + + public static class ComplianceResourceIds + { + public const string AreaId = "7E7BAADD-B7D6-46A0-9CE5-A6F95DDA0E62"; + public const string AreaName = "Compliance"; + + public const string AccountRightsResource = "AccountRights"; + public static readonly Guid AccountRightsLocationId = new Guid("5FCEC4F4-491A-473D-B2F9-205977E66F01"); + + public const string ConfigurationResource = "Configuration"; + public static readonly Guid ConfigurationLocationId = new Guid("64076419-AC67-4F85-B709-B8C28D5B4F1D"); + + public const string ValidationResource = "Validation"; + public static readonly Guid ValidationLocationId = new Guid("A9994840-76C7-4C5B-97CF-2B353AD0E01C"); + } +} + +namespace GitHub.Services.Profile +{ + public static class ProfileResourceIds + { + 
public const string AreaId = "8CCFEF3D-2B87-4E99-8CCB-66E343D2DAA8"; + public const string AreaName = "Profile"; + public static readonly Guid AreaIdGuid = new Guid(AreaId); + + public const string ProfileHttpClientV2AreaId = "31C4AD39-B95A-4AC2-87FE-D8CE878D32A8"; + + public const string ProfileResource = "Profiles"; + public static readonly Guid ProfileLocationid = new Guid("F83735DC-483F-4238-A291-D45F6080A9AF"); + + public const string UserDefaultsResource = "UserDefaults"; + public static readonly Guid UserDefaultsLocationId = new Guid("B583A356-1DA7-4237-9F4C-1DEB2EDBC7E8"); + + public const string AttributeResource = "Attributes"; + public static readonly Guid AttributeLocationid = new Guid("EF743E8C-9A94-4E55-9392-CCFE55CFAE55"); + public static readonly Guid AttributeLocationId2 = new Guid("1392B6AC-D511-492E-AF5B-2263E5545A5D"); + + public const string AvatarResource = "Avatar"; + public static readonly Guid AvatarLocationid = new Guid("855C48A5-ED0C-4762-A640-3D212B2244B8"); + public static readonly Guid Avatar2LocationId = new Guid("67436615-B382-462A-B659-5367A492FB3C"); + + public const string DisplayNameResource = "DisplayName"; + public static readonly Guid DisplayNameLocationid = new Guid("5D969C0D-9A4A-45AB-A4EA-0C902AF8D39C"); + + public const string PublicAliasResource = "PublicAlias"; + public static readonly Guid PublicAliasLocationid = new Guid("B63E58B3-B830-40EA-A382-C198E6E9BB2C"); + + public const string EmailAddressResource = "EmailAddress"; + public static readonly Guid EmailAddressLocationid = new Guid("F47E1E09-08B3-436F-A541-495B3088635A"); + + public const string CountryResource = "Country"; + public static readonly Guid CountryLocationid = new Guid("C96428D6-5805-48A4-B4FD-DC6F1C39BE92"); + + public const string TermsOfServiceResource = "TermsOfService"; + public static readonly Guid TermsOfServiceLocationid = new Guid("E3411396-DA5F-4757-AA9E-521B48EEF625"); + + public const string PreferredEmailConfirmationResource = "PreferredEmailConfirmation"; + public static readonly Guid PreferredEmailConfirmationLocationid = new Guid("238437E4-73B9-4BB9-B467-DE4E5DC0FC78"); + + public const string CountriesResource = "Countries"; + public static readonly Guid CountriesLocationid = new Guid("775F46ED-26B3-4A6F-B7B1-01CF195ACDD0"); + + public const string SupportedLcidsResource = "SupportedLcids"; + public static readonly Guid SupportedLcidsLocationId = new Guid("D5BD1AA6-C269-4BCD-AD32-75FA17475584"); + + public const string RegionsResource = "Regions"; + public static readonly Guid RegionsLocationId = new Guid("92D8D1C9-26B8-4774-A929-D640A73DA524"); + + public const string LocationsResource = "Locations"; + public static readonly Guid LocationsLocationid = new Guid("EEA7DE6F-00A4-42F3-8A29-1BA615691880"); + + public const string LatestTosResource = "LatestTermsofService"; + public static readonly Guid LatestTosLocationid = new Guid("A4A9FB9D-FD32-4F9A-95A8-4B05FAF8C661"); + + public const string SettingsResource = "Settings"; + public static readonly Guid SettingsLocationid = new Guid("5081DFF5-947B-4CE6-9BBE-6C7C094DDCE0"); + + public const string GeoRegionResource = "GeoRegion"; + public static readonly Guid GeoRegionLocationid = new Guid("3BCDA9C0-3078-48A5-A1E0-83BD05931AD0"); + + public const string MigratingProfilesResource = "MigratingProfiles"; + public static readonly Guid MigratingProfilesLocationid = new Guid("397E8E6D-00BB-405F-90F4-02B38B2AC8F6"); + } + + public static class ProfileResourceVersions + { + public const int GenericResourcePreviewVersion 
= 1; + + public const int ProfileResourcePreviewVersion = 1; + public const int ProfileResourceRcVersion = 2; + public const int ProfileResourceRtmVersion = 3; + + public const int AttributeResourcePreviewVersion = 1; + public const int AttributeResourceRcVersion = 2; + } +} + +namespace GitHub.Services.FileContainer +{ + public static class FileContainerResourceIds + { + public const string FileContainerServiceArea = "Container"; + public const string FileContainerIdString = "E4F5C81E-E250-447B-9FEF-BD48471BEA5E"; + public const string BrowseFileContainerIdString = "E71A64AC-B2B5-4230-A4C0-DAD657CF97E2"; + + public static readonly Guid FileContainer = new Guid(FileContainerIdString); + public static readonly Guid BrowseFileContainer = new Guid(BrowseFileContainerIdString); + + public const string FileContainerResource = "Containers"; + } +} + +namespace GitHub.Services.WebApi +{ + public static class CvsFileDownloadResourceIds + { + public const string AreaName = "CvsFileDownload"; + + public const string LocationIdString = "0CF03C5A-D16D-4297-BFEB-F38A56D86670"; + public static readonly Guid LocationId = new Guid(LocationIdString); + + public const string Resource = "CvsFileDownload"; + } +} + +namespace GitHub.Services.Commerce +{ + public static class CommerceResourceIds + { + public const string AreaId = "365D9DCD-4492-4AE3-B5BA-AD0FF4AB74B3"; + public const string AreaName = "Commerce"; + + public const string MeterResource = "Meters"; + public static readonly Guid MeterLocationid = new Guid("AFB09D56-7740-4EB0-867F-792021FAB7C9"); + + public const string CommercePackageResource = "CommercePackage"; + public static readonly Guid CommercePackageLocationId = new Guid("E8135F49-A1DC-4135-80F4-120BBFC2ACF0"); + + public const string UsageEventResource = "UsageEvents"; + public static readonly Guid UsageEventLocationid = new Guid("EED7D28A-12A9-47ED-9A85-91A76C63E74B"); + + public const string ReportingEventResource = "ReportingEvents"; + public static readonly Guid ReportingEventLocationId = new Guid("E3296A33-647F-4A09-85C6-64B9259DADB8"); + + public const string SubscriptionResource = "Subscription"; + public static readonly Guid SubscriptionLocationId = new Guid("64485509-D692-4B70-B440-D02B3B809820"); + + public const string RegionsResource = "Regions"; + public static readonly Guid RegionsLocationId = new Guid("9527c79d-9f3e-465d-8178-069106c39457"); + + public const string OfferSubscriptionResource = "OfferSubscription"; + public static readonly Guid OfferSubscriptionResourceId = new Guid("E8950CE5-80BC-421F-B093-033C18FD3D79"); + + public const string OfferMeterResource = "OfferMeter"; + public static readonly Guid OfferMeterLocationId = new Guid("8B79E1FB-777B-4D0A-9D2E-6A4B2B8761B9"); + + public const string OfferMeterPriceResource = "OfferMeterPrice"; + public static readonly Guid OfferMeterPriceLocationId = new Guid("1C67C343-2269-4608-BC53-FE62DAA8E32B"); + + public const string ConnectedServerResource = "ConnectedServer"; + public static readonly Guid ConnectedServerLocationId = new Guid("C9928A7A-8102-4061-BDCE-B090068C0D2B"); + + public const string PurchaseRequestResource = "PurchaseRequest"; + public static readonly Guid PurchaseRequestLocationId = new Guid("A349B796-BDDB-459E-8921-E1967672BE86"); + + public const string ResourceMigrationResource = "ResourceMigration"; + public static readonly Guid ResourceMigrationLocationId = new Guid("2F11E604-83B2-4596-B3C6-242BAB102DA3"); + + public const string CommerceHostHelperResource = "CommerceHostHelperResource"; + public 
static readonly Guid CommerceHostHelperLocationId = new Guid("8B4C702A-7449-4FEB-9B23-ADD4288DDA1A"); + } + + public static class CommerceResourceVersions + { + public const int MeterV1Resources = 1; + + public const int MeterV2Resources = 2; + + public const int BillingV1Resources = 1; + + public const int OfferMeterV1Resources = 1; + + public const int OfferMeterPriceV1Resources = 1; + + public const int CommercePackageV1Resources = 1; + + public const int ReportingV1Resources = 1; + + public const int PurchaseRequestV1Resources = 1; + + public const int ResourceMigrationV1Resources = 1; + + public const int InfrastructureOrganizationV1Resources = 1; + } + + public static class CsmResourceIds + { + public const string AreaId = "B3705FD5-DC18-47FC-BB2F-7B0F19A70822"; + public const string AreaName = "Csm"; + + public const string ExtensionResourceResource = "ExtensionResource"; + public static readonly Guid ExtensionResourceLocationId = new Guid("9cb405cb-4a72-4a50-ab6d-be1da1726c33"); + + public const string ExtensionResourceGroupResource = "ExtensionResourceGroup"; + public static readonly Guid ExtensionResourceGroupLocationId = new Guid("a509d9a8-d23f-4e0f-a69f-ad52b248943b"); + + public const string AccountResourceResource = "AccountResource"; + public static readonly Guid AccountResourceResourceLocationId = new Guid("5745408e-6e9e-49c7-92bf-62932c8df69d"); + + public const string AccountResourceGroupResource = "AccountResourceGroup"; + public static readonly Guid AccountResourceGroupLocationId = new Guid("73d8b171-a2a0-4ac6-ba0b-ef762098e5ec"); + + public const string SubscriptionResourceGroupResource = "SubscriptionResourceGroup"; + public static readonly Guid SubscriptionResourceGroupLocationId = new Guid("f34be62f-f215-4bda-8b57-9e8a7a5fd66a"); + + public const string AccountResourceOperationsResource = "AccountResourceOperations"; + public static readonly Guid AccountResourceOperationsLocationId = new Guid("454d976b-812e-4947-bc4e-c2c23160317e"); + + public const string NameAvailabilityResource = "NameAvailability"; + public static readonly Guid NameAvailabilityResourceLocationId = new Guid("031d6b9b-a0d4-4b46-97c5-9ddaca1aa5cd"); + + public const string SubscriptionEventsResource = "SubscriptionEvents"; + public static readonly Guid SubscriptionEventsLocationId = new Guid("97bc4c4d-ce2e-4ca3-87cc-2bd07aeee500"); + + public const string ResourceGroupsResourceName = "ResourceGroups"; + public static readonly Guid ResourceGroupsResourceLocationId = new Guid("9e0fa51b-9d61-4899-a5a1-e1f0f5e75bc0"); + } + + public static class CommerceServiceResourceIds + { + // Offer Meter Area + public const string OfferMeterAreaId = "000080C1-AA68-4FCE-BBC5-C68D94BFF8BE"; + public const string OfferMeterAreaName = "OfferMeter"; + + public const string OfferMeterLocationString = "81E37548-A9E0-49F9-8905-650A7260A440"; + public static readonly Guid OfferMeterLocationId = new Guid(OfferMeterLocationString); + public const string OfferMeterResource = "OfferMeter"; + + public const string OfferMeterPriceResource = "OfferMeterPrice"; + public const string OfferMeterPriceLocationString = "D7197E00-DDDF-4029-9F9B-21B935A6CF9F"; + public static readonly Guid OfferMeterPriceLocationId = new Guid(OfferMeterPriceLocationString); + + // Meters Area + public const string MeterAreaId = "4C19F9C8-67BD-4C18-800B-55DC62C3017F"; + public const string MetersAreaName = "Meters"; + + public const string MeterResource = "Meters"; + public const string MeterLocationString = "4BD6E06B-1EDF-41A6-9BAF-D15B874DC539"; + 
public static readonly Guid MeterLocationid = new Guid(MeterLocationString); + + // Commerce Package Area + public const string CommercePackageAreaName = "Package"; + public const string CommercePackageAreaId = "45FB9450-A28D-476D-9B0F-FB4AEDDDFF73"; + + public const string CommercePackageResource = "CommercePackage"; + public const string CommercePackageLocationString = "A5E80D85-9718-44E0-BBED-461109268DBC"; + public static readonly Guid CommercePackageLocationId = new Guid(CommercePackageLocationString); + + // Usage Events Area + public const string UsageEventsAreaName = "UsageEvents"; + public const string UsageEventsAreaId = "3B16A4DB-B853-4C64-AA16-72138F5BB750"; + + public const string UsageEventsResource = "UsageEvents"; + public const string UsageEventsLocationString = "78741F74-E4F0-41B2-BB93-28C886443027"; + public static readonly Guid UsageEventLocationid = new Guid(UsageEventsLocationString); + + // Reporting Event Area + public const string ReportingEventsAreaName = "ReportingEvents"; + public const string ReportingEventsAreaId = "C890B7C4-5CF6-4280-91AC-331E439B8119"; + + public const string ReportingEventsResource = "ReportingEvents"; + public const string ReportingEventsLocationString = "D0BA838F-9253-46C5-ABB2-0ACF551C23D7"; + public static readonly Guid ReportingEventsLocationId = new Guid(ReportingEventsLocationString); + + // Subscription Area + public const string SubscriptionAreaId = "AC02550F-721A-4913-8EA5-CADAE535B03F"; + public const string SubscriptionAreaName = "Subscription"; + + public const string SubscriptionResource = "Subscription"; + public const string SubscriptionLocationString = "94DE86A2-03E3-42DB-A2E8-1A82BF13A262"; + public static readonly Guid SubscriptionLocationId = new Guid(SubscriptionLocationString); + + public const string AccountDetailsResource = "AccountDetails"; + public const string AccountDetailsLocationString = "0288F4E6-21D3-4529-AC5F-1719F99A4396"; + public static readonly Guid AccountDetailsLocationId = new Guid(AccountDetailsLocationString); + + // Region Area + public const string RegionsAreaName = "Regions"; + public const string RegionsAreaId = "A6ACEE79-C91A-47BA-87DF-AF36581833B6"; + + public const string RegionsResource = "Regions"; + public const string RegionsLocationString = "AAE8A531-9968-456F-9EF1-FE0ECF4724E8"; + public static readonly Guid RegionsLocationId = new Guid(RegionsLocationString); + + // Offer Subscription Area + public const string OfferSubscriptionAreaName = "OfferSubscription"; + public const string OfferSubscriptionAreaId = "5D4A2F52-5A08-41FB-8CCA-768ADD070E18"; + + public const string OfferSubscriptionResource = "OfferSubscription"; + public const string OfferSubscriptionLocationString = "7C13D166-01C5-4CCD-8A75-E5AD6AB3B0A6"; + public static readonly Guid OfferSubscriptionResourceId = new Guid(OfferSubscriptionLocationString); + + // Connected Server Area + public const string ConnectedServerAreaName = "ConnectedServer"; + public const string ConnectedServerAreaId = "05A2B228-317C-4886-9FE9-828F9EA3815A"; + + public const string ConnectedServerResource = "ConnectedServer"; + public const string ConnectedServerLocationString = "AB6E0E2F-A3CA-4478-BAFC-8E7AD022BE01"; + public static readonly Guid ConnectedServerLocationId = new Guid(ConnectedServerLocationString); + + // Purchase Request Area + public const string PurchaseRequestAreaName = "PurchaseRequest"; + public const string PurchaseRequestAreaId = "9D439667-F8CF-4991-89A9-95CA6A763327"; + + public const string PurchaseRequestResource = 
"PurchaseRequest"; + public const string PurchaseRequestLocationString = "6F905B2D-292A-4D30-B38A-2D254EAB06B7"; + public static readonly Guid PurchaseRequestLocationId = new Guid(PurchaseRequestLocationString); + + // Resource Migration Area + public const string ResourceMigrationAreaName = "ResourceMigration"; + public const string ResourceMigrationAreaId = "FFCFC36A-0BE8-412A-A2BB-93C2ABD4048B"; + + public const string ResourceMigrationResource = "ResourceMigration"; + public const string ResourceMigrationLocationString = "00432895-B3F6-488C-BA71-792FA5E07383"; + public static readonly Guid ResourceMigrationLocationId = new Guid(ResourceMigrationLocationString); + } + + public static class CsmResourceProviderResourceIds + { + public const string AreaId = "2900E97E-7BBD-4D87-95EE-BE54611B6184"; + public const string AreaName = "CsmResourceProvider"; + + public const string ExtensionResourceResource = "VssExtensionResource"; + public static readonly Guid ExtensionResourceLocationId = new Guid("8DF1CB68-197E-4BAF-8CE2-C96021879971"); + + public const string ExtensionResourceGroupResource = "VssExtensionResourceGroup"; + public static readonly Guid ExtensionResourceGroupLocationId = new Guid("E14787AB-FBD5-4064-A75D-0603C9ED66A8"); + + public const string AccountResourceResource = "VssAccountResource"; + public static readonly Guid AccountResourceResourceLocationId = new Guid("58FA3A85-AF20-408D-B46D-6D369408E3DA"); + + public const string AccountResourceGroupResource = "VssAccountResourceGroup"; + public static readonly Guid AccountResourceGroupLocationId = new Guid("955956A7-FBEB-48E6-9D78-C60F3F84BAE9"); + + public const string SubscriptionResourceGroupResource = "VssSubscriptionResourceGroup"; + public static readonly Guid SubscriptionResourceGroupLocationId = new Guid("8A066194-3817-4E76-9BBC-2A1446FA0FC5"); + + public const string AccountResourceOperationsResource = "VssAccountResourceOperations"; + public static readonly Guid AccountResourceOperationsLocationId = new Guid("14917175-ECBE-453B-B436-50430219EBA9"); + + public const string NameAvailabilityResource = "VssNameAvailability"; + public static readonly Guid NameAvailabilityResourceLocationId = new Guid("7DBAE6E1-993E-4AC9-B20D-6A39EEE4028B"); + + public const string SubscriptionEventsResource = "VssSubscriptionEvents"; + public static readonly Guid SubscriptionEventsLocationId = new Guid("A7F5BE2F-9AF8-4CC2-863F-D07377B2C079"); + + public const string ResourceGroupsResource = "VssResourceGroups"; + public static readonly Guid ResourceGroupsResourceLocationId = new Guid("8D9245EE-19A2-45B2-BE3E-03234122298E"); + } +} + +namespace GitHub.Services.Health +{ + public static class HealthResourceIds + { + public const string HealthArea = "Health"; + public const string HealthResource = "Health"; + public static readonly Guid HealthLocationId = new Guid("30964BA7-2A11-4792-B7BA-DF191DBCC3BB"); + } +} + +namespace GitHub.Services.ActivityStatistic +{ + public static class ActivityStatisticIds + { + public const string ActivityStatisticArea = "Stats"; + public const string ActivityStatisticResource = "Activities"; + public static readonly Guid ActivityStatisticId = new Guid("5F4C431A-4D8F-442D-96E7-1E7522E6EABD"); + } +} + +namespace GitHub.Services.ContentSecurityPolicy +{ + public static class ContentSecurityPolicyResourceIds + { + public const string CspReportArea = "CspReport"; + public const string CspReportResource = "CspReport"; + public static readonly Guid CspLocationId = new Guid("FA48A6B6-C4A9-42B4-AFE7-2640F68F99B6"); + } 
+} + +namespace GitHub.Services.Location +{ + [GenerateAllConstants] + public static class LocationResourceIds + { + public const string LocationServiceArea = "Location"; + + public const string ConnectionDataResource = "ConnectionData"; + public static readonly Guid ConnectionData = new Guid("{00D9565F-ED9C-4A06-9A50-00E7896CCAB4}"); + + public const string ServiceDefinitionsResource = "ServiceDefinitions"; + public static readonly Guid ServiceDefinitions = new Guid("{D810A47D-F4F4-4A62-A03F-FA1860585C4C}"); + + public const string AccessMappingsResource = "AccessMappings"; + public static readonly Guid AccessMappings = new Guid("{A52F2F69-B171-4E88-9DFE-34B44CF7E386}"); + + public const string ResourceAreasResource = "ResourceAreas"; + public static readonly Guid ResourceAreas = new Guid("E81700F7-3BE2-46DE-8624-2EB35882FCAA"); + + // Used for updating the SPS locations in account migrations. + public const string SpsServiceDefintionResource = "SpsServiceDefinition"; + + public static readonly Guid SpsServiceDefinition = new Guid("{DF5F298A-4E06-4815-A13E-6CE90A37EFA4}"); + } +} + +namespace GitHub.Services.Notification +{ + public static class PersistedNotificationResourceIds + { + public const string AreaId = "BA8495F8-E9EE-4A9E-9CBE-142897543FE9"; + public const string AreaName = "PersistedNotification"; + + public static readonly Guid NotificationsId = new Guid("E889FFCE-9F0A-4C6C-B749-7FB1ECFA6950"); + public const string NotificationsResource = "Notifications"; + + public static readonly Guid RecipientMetadataId = new Guid("1AAFF2D2-E2F9-4784-9F93-412A9F2EFD86"); + public const string RecipientMetadataResource = "RecipientMetadata"; + } + + public static class PersistedNotificationResourceVersions + { + public const int NotificationsResourcePreviewVersion = 1; + public const int RecipientMetadataPreviewVersion = 1; + } +} + +namespace GitHub.Services.Operations +{ + [GenerateAllConstants] + public static class OperationsResourceIds + { + public const string AreaName = "operations"; + public const string OperationsResource = "operations"; + public const string OperationsRouteName = "Operations"; + public const string OperationsPluginRouteName = "OperationsPlugin"; + public const string OperationsApi = "OperationsApi"; + public const string TagOperationsLocationId = "9A1B74B4-2CA8-4A9F-8470-C2F2E6FDC949"; + public static readonly Guid OperationsLocationId = new Guid(TagOperationsLocationId); + public const string TagOperationsPluginLocationId = "7F82DF6D-7D09-46C1-A015-643B556B3A1E"; + public static readonly Guid OperationsPluginLocationId = new Guid(TagOperationsPluginLocationId); + } +} + +namespace GitHub.Services.Directories.DirectoryService +{ + public static class DirectoryResourceIds + { + public const string DirectoryServiceArea = "Directory"; + public const string DirectoryService = "2B98ABE4-FAE0-4B7F-8562-7141C309B9EE"; + + public const string MembersResource = "Members"; + public static readonly Guid Members = Guid.Parse("{89526A2C-E9E3-1F40-A3FB-54D16BDA15B0}"); + public static readonly Guid MemberStatusLocationId = Guid.Parse("{714914b2-ad3f-4933-bf2e-fc3cabb37696}"); + } +} + +namespace GitHub.Services.FeatureAvailability +{ + [GenerateAllConstants] + public static class FeatureAvailabilityResourceIds + { + public const string AreaId = "C8E5AF97-4B95-4E73-9E7F-69A06507967C"; + public const string FeatureAvailabilityAreaName = "FeatureAvailability"; + public static readonly Guid FeatureFlagsLocationId = Guid.Parse("{3E2B80F8-9E6F-441E-8393-005610692D9C}"); + } +} + 
+namespace GitHub.Services.IdentityPicker +{ + //Common identity picker in the framework + [GenerateAllConstants] + public static class CommonIdentityPickerResourceIds + { + public const string ServiceArea = "IdentityPicker"; + + public const string IdentitiesResource = "Identities"; + + public static readonly Guid IdentitiesLocationId = new Guid("4102F006-0B23-4B26-BB1B-B661605E6B33"); + public static readonly Guid IdentityAvatarLocationId = new Guid("4D9B6936-E96A-4A42-8C3B-81E8337CD010"); + public static readonly Guid IdentityFeatureMruLocationId = new Guid("839E4258-F559-421B-A38E-B6E691967AB3"); + public static readonly Guid IdentityConnectionsLocationId = new Guid("C01AF8FD-2A61-4811-A7A3-B85BCEC080AF"); + } +} + +namespace GitHub.Services.Settings +{ + [GenerateAllConstants] + public static class SettingsApiResourceIds + { + public const string SettingsAreaName = "Settings"; + + public const string SettingEntriesResource = "Entries"; + public const string SettingEntriesLocationIdString = "CD006711-163D-4CD4-A597-B05BAD2556FF"; + public static readonly Guid SettingEntriesLocationId = new Guid(SettingEntriesLocationIdString); + public const string NamedScopeSettingEntriesLocationIdString = "4CBAAFAF-E8AF-4570-98D1-79EE99C56327"; + public static readonly Guid NamedScopeSettingEntriesLocationId = new Guid(NamedScopeSettingEntriesLocationIdString); + } +} + +namespace GitHub.Services.WebPlatform +{ + [GenerateAllConstants] + public static class AuthenticationResourceIds + { + public const string AreaId = "A084B81B-0F23-4136-BAEA-98E07F3C7446"; + public const string AuthenticationAreaName = "WebPlatformAuth"; + public static readonly Guid AuthenticationLocationId = Guid.Parse("{11420B6B-3324-490A-848D-B8AAFDB906BA}"); + public const string SessionTokenResource = "SessionToken"; + } + + [GenerateAllConstants] + public static class CustomerIntelligenceResourceIds + { + public const string AreaId = "40132BEE-F5F3-4F39-847F-80CC44AD9ADD"; + public const string CustomerIntelligenceAreaName = "CustomerIntelligence"; + public static readonly Guid EventsLocationId = Guid.Parse("{B5CC35C2-FF2B-491D-A085-24B6E9F396FD}"); + } + + public static class ContributionResourceIds + { + public const string AreaId = "39675476-C858-48A1-A5CD-80ED65E86532"; + public const string AreaName = "Contribution"; + public const string HierarchyLocationIdString = "8EC9F10C-AB9F-4618-8817-48F3125DDE6A"; + public static readonly Guid HierarchyLocationId = Guid.Parse(HierarchyLocationIdString); + public const string HierarchyResource = "Hierarchy"; + public const string HierarchyQueryLocationIdString = "3353E165-A11E-43AA-9D88-14F2BB09B6D9"; + public static readonly Guid HierarchyQueryLocationId = Guid.Parse(HierarchyQueryLocationIdString); + public const string HierarchyQueryResource = "HierarchyQuery"; + } + + [GenerateAllConstants] + public static class ClientTraceResourceIds + { + public const string AreaId = "054EEB0E-108E-47DC-848A-7074B14774A9"; + public const string ClientTraceAreaName = "ClientTrace"; + public static readonly Guid EventsLocationId = Guid.Parse("{06BCC74A-1491-4EB8-A0EB-704778F9D041}"); + public const string ClientTraceEventsResource = "Events"; + } +} + +namespace GitHub.Services.Zeus +{ + [GenerateAllConstants] + public static class BlobCopyLocationIds + { + public const string ResourceString = "{8907fe1c-346a-455b-9ab9-dde883687231}"; + public static readonly Guid ResourceId = new Guid(ResourceString); + public const string ResouceName = "BlobCopyRequest"; + public const string AreaName = 
"BlobCopyRequest"; + } + + [GenerateAllConstants] + public static class DatabaseMigrationLocationIds + { + public const string ResourceString = "{D56223DF-8CCD-45C9-89B4-EDDF69240000}"; + public static readonly Guid ResourceId = new Guid(ResourceString); + public const string ResouceName = "DatabaseMigration"; + public const string AreaName = "DatabaseMigration"; + } +} + +namespace GitHub.Services.Identity.Mru +{ + [GenerateAllConstants] + public static class IdentityMruResourceIds + { + public const string AreaId = "FC3682BE-3D6C-427A-87C8-E527B16A1D05"; + public const string AreaName = "Identity"; + + public static readonly Guid MruIdentitiesLocationId = new Guid("15D952A1-BB4E-436C-88CA-CFE1E9FF3331"); + public const string MruIdentitiesResource = "MruIdentities"; + } +} + +namespace GitHub.Services.Servicing +{ + public static class ServicingResourceIds + { + public const string AreaName = "Servicing"; + + public static readonly Guid JobsLocationId = new Guid("807F536E-0C6D-46D9-B856-4D5F3C27BEF5"); + public static readonly Guid LogsLocationId = new Guid("B46254F3-9523-4EF8-B69E-FD6EED5D0BB8"); + public static readonly Guid ServiceLevelLocationId = new Guid("3C4BFE05-AEB6-45F8-93A6-929468401657"); + + public const string JobsResourceName = "Jobs"; + public const string LogsResourceName = "Logs"; + public const string ServiceLevelResource = "ServiceLevel"; + } +} + +namespace GitHub.Services.Auditing +{ + public static class AuditingResourceIds + { + public const string AreaName = "Auditing"; + + public static readonly Guid EndpointsLocationId = new Guid("D4AB3CD0-66BE-4551-844E-CC2C32FA64C5"); + public const string EndpointResourceName = "Endpoints"; + } +} + +namespace GitHub.Services.ServicePrincipal +{ + public static class ServicePrincipalResourceIds + { + public const string AreaName = "ServicePrincipal"; + public const string ServicePrincipalsResourceName = "ServicePrincipals"; + public static readonly Guid ServicePrincipalsLocationId = new Guid("992CB93B-847E-4683-88C9-848CD450FDF6"); + } +} + +namespace GitHub.Services.TokenSigningKeyLifecycle +{ + public static class TokenSigningKeyLifecycleResourceIds + { + public const string AreaName = "TokenSigning"; + public const string AreaId = "{f189ca86-04a2-413c-81a0-abdbd7c472da}"; + + public const string SigningKeysResourceName = "SigningKeys"; + public static readonly Guid SigningKeysLocationId = new Guid("62361140-9bb7-4d57-8223-12e6155ce354"); + + public const string NamespaceResourceName = "SigningNamespace"; + public static readonly Guid NamespaceLocationId = new Guid("29f94429-6088-4394-afd9-0435df55f079"); + } +} + +namespace GitHub.Services.GitHubConnector +{ + public static class GitHubConnectorResourceIds + { + public const string AreaId = "85738938-9FAE-4EB4-B4F0-871502E6B549"; + public const string AreaName = "GitHubConnector"; + + public static readonly Guid ResourceAreaId = new Guid(AreaId); + + public static readonly Guid ConnectionsResourceLocationId = new Guid("EBE1CF27-8F19-4955-A47B-09F125F06518"); + public const string ConnectionsResourceName = "Connections"; + + public static readonly Guid InstallationTokensResourceLocationId = new Guid("05188D9F-DD80-4C9E-BA91-4B0B3A8A67D7"); + public const string InstallationTokensResourceName = "InstallationTokens"; + + public static readonly Guid WebhookEventsResourceLocationId = new Guid("063EC204-5C0D-402F-86CF-36B1703E187F"); + public const string WebhookEventsResourceName = "WebhookEvents"; + + public static readonly Guid UserOAuthUrlsResourceLocationId = new 
Guid("9EA35039-A91F-4E02-A81D-573623FF7235"); + public const string UserOAuthUrlsResourceName = "UserOAuthUrls"; + + public const string DefaultResourceId = "default"; + } +} + +namespace GitHub.Services.Organization +{ + public static class OrganizationResourceIds + { + public const string AreaId = "0D55247A-1C47-4462-9B1F-5E2125590EE6"; + + public static readonly Guid ResourceAreaId = new Guid(AreaId); + + public const string OrganizationArea = "Organization"; + + public const string PropertiesResourceName = "Properties"; + + public const string LogoResourceName = "Logo"; + + // organization resources + public static readonly Guid OrganizationsResourceLocationId = new Guid("95F49097-6CDC-4AFE-A039-48B4D4C4CBF7"); + + public const string OrganizationsResourceName = "Organizations"; + + // organization properties resources + public static readonly Guid OrganizationPropertiesResourceLocationId = new Guid("103707C6-236D-4434-A0A2-9031FBB65FA6"); + + public const string OrganizationPropertiesResourceName = "OrganizationProperties"; + + // organization logo resources + public static readonly Guid OrganizationLogoResourceLocationId = new Guid("A9EEEC19-85B4-40AE-8A52-B4F697260AC4"); + + public const string OrganizationLogoResourceName = "OrganizationLogo"; + + // organization migration blobs resources + public static readonly Guid OrganizationMigrationBlobsResourceLocationId = new Guid("93F69239-28BA-497E-B4D4-33E51E6303C3"); + + public const string OrganizationMigrationBlobsResourceName = "OrganizationMigrationBlobs"; + + // collection resources + public static readonly Guid CollectionsResourceLocationId = new Guid("668B5607-0DB2-49BB-83F8-5F46F1094250"); + + public const string CollectionsResourceName = "Collections"; + + // collection properties resources + public static readonly Guid CollectionPropertiesResourceLocationId = new Guid("A0F9C508-A3C4-456B-A812-3FB0C4743521"); + + public const string CollectionPropertiesResourceName = "CollectionProperties"; + + // region resources + public static readonly Guid RegionsResourceLocationId = new Guid("6F84936F-1801-46F6-94FA-1817545D366D"); + + public const string RegionsResourceName = "Regions"; + } + + public static class OrganizationPolicyResourceIds + { + public const string OrganizationPolicyArea = "OrganizationPolicy"; + + // policy + public static readonly Guid PoliciesLocationId = new Guid("D0AB077B-1B97-4F78-984C-CFE2D248FC79"); + + public const string PoliciesResourceName = "Policies"; + + // policies batch + public static readonly Guid PoliciesBatchLocationId = new Guid("7EF423E0-59D8-4C00-B951-7143B18BD97B"); + + public const string PoliciesBatchResourceName = "PoliciesBatch"; + + // policy metadata + public static readonly Guid PolicyInformationLocationId = new Guid("222AF71B-7280-4A95-80E4-DCB0DEEAC834"); + + public const string PolicyInformationResourceName = "PolicyInformation"; + } +} + +namespace GitHub.Services.UserMapping +{ + public static class UserMappingResourceIds + { + public const string AreaId = "C8C8FFD0-2ECF-484A-B7E8-A226955EE7C8"; + public const string UserMappingArea = "UserMapping"; + + public static readonly Guid UserAccountMappingsResourceLocationId = new Guid("0DBF02CC-5EC3-4250-A145-5BEB580E0086"); + public const string UserAccountMappingsResourceName = "UserAccountMappings"; + } +} + +namespace GitHub.Services.TokenRevocation +{ + public static class TokenRevocationResourceIds + { + public const string AreaName = "TokenRevocation"; + public const string AreaId = "{3C25A612-6355-4A43-80FE-75AEBE07E981}"; + + 
public const string RulesResourceName = "Rules"; + public static readonly Guid RulesLocationId = new Guid("03923358-D412-40BA-A63F-36A1836C7706"); + } +} + +namespace GitHub.Services.MarketingPreferences +{ + public static class MarketingPreferencesResourceIds + { + public const string AreaId = "F4AA2205-FF00-4EEE-8216-C7A73CEE155C"; + public const string AreaName = "MarketingPreferences"; + + public const string ContactWithOffersResource = "ContactWithOffers"; + public static readonly Guid ContactWithOffersLocationid = new Guid("6E529270-1F14-4E92-A11D-B496BBBA4ED7"); + + public const string MarketingPreferencesResource = "MarketingPreferences"; + public static readonly Guid MarketingPreferencesLocationId = new Guid("0e2ebf6e-1b6c-423d-b207-06b1afdfe332"); + } + + public static class MarketingPreferencesResourceVersions + { + public const int GenericResourcePreviewVersion = 1; + } +} + +namespace GitHub.Services.HostAcquisition +{ + public static class HostAcquisitionResourceIds + { + public const string AreaName = "HostAcquisition"; + public const string AreaId = "8E128563-B59C-4A70-964C-A3BD7412183D"; + + public static readonly Guid ResourceAreaId = new Guid(AreaId); + + public const string HostAcquisitionArea = "HostAcquisition"; + + // collection resources + public static readonly Guid CollectionsResourceLocationId = new Guid("2BBEAD06-CA34-4DD7-9FE2-148735723A0A"); + + public const string CollectionsResourceName = "Collections"; + + // NameAvailability resources + public static readonly Guid NameAvailabilityResourceLocationId = new Guid("01A4CDA4-66D1-4F35-918A-212111EDC9A4"); + + public const string NameAvailabilityResourceName = "NameAvailability"; + + // region resources + public static readonly Guid RegionsResourceLocationId = new Guid("776EF918-0DAD-4EB1-A614-04988CA3A072"); + + public const string RegionsResourceName = "Regions"; + } +} + +namespace GitHub.Services.OAuthWhitelist +{ + public static class OAuthWhitelistResourceIds + { + public const string AreaId = "BED1E9DD-AE97-4D73-9E01-4797F66ED0D3"; + public const string AreaName = "OAuthWhitelist"; + + public const string OAuthWhitelistEntriesResource = "OAuthWhitelistEntries"; + public static readonly Guid OAuthWhitelistEntriesLocationId = new Guid("3AFD5B3F-12B1-4551-B6D7-B33E0E2D45D6"); + } +} + +namespace GitHub.Services.CentralizedFeature +{ + public class CentralizedFeatureResourceIds + { + public const string AreaName = "CentralizedFeature"; + public const string AreaId = "86BF2186-3092-4F5E-86A6-13997CE0924A"; + public static readonly Guid AreaIdGuid = new Guid(AreaId); + + public class Availability + { + public static readonly Guid LocationId = new Guid("EB8B51A6-1BE5-4337-B4C1-BAE7BCB587C2"); + public const string Resource = "Availability"; + } + } +} + +namespace GitHub.Services.AzureFrontDoor +{ + public static class AfdResourceIds + { + public const string AreaName = "AzureFrontDoor"; + + public const string AfdEndpointLookupResource = "AfdEndpointLookup"; + public static readonly Guid EndpointLookupLocationId = new Guid("39738637-F7C6-439A-82D7-83EFAA3A7DB4"); + } +} + +namespace GitHub.Services.WebApi +{ + public static class BasicAuthBatchResourceIds + { + public const string AreaName = "BasicAuthBatch"; + public const string AreaId = "31D56A90-A194-4567-AACF-EFE0007E3309"; + + public const string BasicAuthBatchResource = "BasicAuthBatch"; + public static readonly Guid BasicAuthBatch = new Guid("{8214680a-5c4a-4333-9b3c-228030c136f6}"); + } +} + +namespace GitHub.Services.PermissionLevel +{ + public static 
class PermissionLevelDefinitionResourceIds + { + public const string AreaName = "PermissionLevel"; + public const string AreaId = "E97D4D3C-C339-4745-A987-BD6F6C496788"; + + public static readonly Guid ResourceAreaId = new Guid(AreaId); + + public static readonly Guid PermissionLevelDefinitionsResourceLocationId = new Guid("D9247EA2-4E01-47C1-8662-980818AAE5D3"); + + public const string PermissionLevelDefinitionsResourceName = "PermissionLevelDefinitions"; + } + + public static class PermissionLevelAssignmentResourceIds + { + public const string AreaName = "PermissionLevel"; + public const string AreaId = "E97D4D3C-C339-4745-A987-BD6F6C496788"; + + public static readonly Guid ResourceAreaId = new Guid(AreaId); + public static readonly Guid PermissionLevelAssignmentsResourceLocationId = new Guid("005E0302-7988-4066-9AC0-1D93A42A9F0B"); + + public const string PermissionLevelAssignmentsResourceName = "PermissionLevelAssignments"; + } +} diff --git a/src/Sdk/WebApi/WebApi/ServiceEvent.cs b/src/Sdk/WebApi/WebApi/ServiceEvent.cs new file mode 100644 index 00000000000..b1813c1a68c --- /dev/null +++ b/src/Sdk/WebApi/WebApi/ServiceEvent.cs @@ -0,0 +1,91 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Runtime.Serialization; + +namespace GitHub.Services.WebApi +{ + // This is the event that shall be published on the service bus by different services for other first party services. + [DataContract] + public class ServiceEvent + { + private Object m_resource; + + /// + /// This is the id of the type. + /// Constants that will be used by subscribers to identify/filter events being published on a topic. + /// + [DataMember] + public String EventType { get; set; } + + /// + /// This is the service that published this event. + /// + [DataMember] + public Publisher Publisher { get; set; } + + /// + /// The resource object that carries specific information about the event. The object must have + /// the ServiceEventObject applied for serialization/deserialization to work. + /// + [DataMember] + public Object Resource + { + get + { + return m_resource; + } + set + { + Type type = value.GetType(); + if (!type.GetTypeInfo().GetCustomAttributes(true).Any()) + { + throw new InvalidOperationException($"Resource of type {type.FullName} must have ServiceEventObject attribute"); + } + m_resource = value; + } + } + + /// + /// This is the version of the resource. + /// + [DataMember] + public String ResourceVersion { get; set; } + + /// + /// This dictionary carries the context descriptors along with their ids. + /// + [DataMember] + public Dictionary ResourceContainers { get; set; } + } + + [DataContract] + public class Publisher + { + /// + /// Name of the publishing service. + /// + [DataMember] + public String Name { get; set; } + + /// + /// Service Owner Guid + /// Eg. 
Tfs : 00025394-6065-48CA-87D9-7F5672854EF7 + /// + [DataMember] + public Guid ServiceOwnerId { get; set; } + } + + public class ResourceContainerTypes + { + public const String Account = "Account"; + public const String Collection = "Collection"; + } + + [System.AttributeUsage(System.AttributeTargets.Class, Inherited = true)] + public class ServiceEventObjectAttribute : Attribute + { + public ServiceEventObjectAttribute() { } + } +} diff --git a/src/Sdk/WebApi/WebApi/TaskExtensions.cs b/src/Sdk/WebApi/WebApi/TaskExtensions.cs new file mode 100644 index 00000000000..01cef55a5f1 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/TaskExtensions.cs @@ -0,0 +1,65 @@ +using System.ComponentModel; +using System.Net.Http; +using System.Threading.Tasks; + +namespace GitHub.Services.WebApi +{ + //This class should be used by callers of derivatives of HttpClientBase to deal with + //getting a proper exception from a task, when you want to get a synchronous result + [EditorBrowsable(EditorBrowsableState.Never)] + public static class TaskExtensions + { + /// + /// Blocks until the task has completed, throwing the remote exception if one was raised. + /// + /// The task to await + [AsyncFixer.BlockCaller] + public static void SyncResult(this Task task) + { + // NOTE: GetResult() on TaskAwaiter uses ExceptionDispatchInfo.Throw if there + // is an exception, which preserves the original call stack and does not use + // AggregateException (unless explicitly thrown by the caller). + task.GetAwaiter().GetResult(); + } + + /// + /// Blocks until the task has completed, returning the result or throwing the remote exception if one was raised. + /// + /// The type for the result + /// The task to await + /// The result of the task + [AsyncFixer.BlockCaller] + public static T SyncResult(this Task task) + { + // NOTE: GetResult() on TaskAwaiter uses ExceptionDispatchInfo.Throw if there + // is an exception, which preserves the original call stack and does not use + // AggregateException (unless explicitly thrown by the caller). + return task.GetAwaiter().GetResult(); + } + + /// + /// Blocks until the task has completed, returning the result or throwing the remote exception if one was raised. + /// + /// The task to await + /// The result of the task + [AsyncFixer.BlockCaller] + public static HttpResponseMessage SyncResult(this Task task) + { + // NOTE: This is effectively the same as , + // but currently remains to support binary compatibility. + + // NOTE: GetResult() on TaskAwaiter uses ExceptionDispatchInfo.Throw if there + // is an exception, which preserves the original call stack and does not use + // AggregateException (unless explicitly thrown by the caller). 
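+            // Illustrative usage (the httpClient/request names are hypothetical, not part of this
+            // file): a caller that cannot await can block for the response instead of using
+            // task.Result, e.g.
+            //
+            //     HttpResponseMessage response = httpClient.SendAsync(request).SyncResult();
+            //
+            // A faulted task rethrows its original exception with its original call stack rather
+            // than surfacing an AggregateException.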
+ return task.GetAwaiter().GetResult(); + } + } +} + +namespace AsyncFixer +{ + [System.AttributeUsage(System.AttributeTargets.Method)] + public class BlockCaller : System.Attribute + { } +} + diff --git a/src/Sdk/WebApi/WebApi/Utilities/AsyncLock.cs b/src/Sdk/WebApi/WebApi/Utilities/AsyncLock.cs new file mode 100644 index 00000000000..2b5bd9d6081 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Utilities/AsyncLock.cs @@ -0,0 +1,57 @@ +using System; +using System.Threading; +using System.Threading.Tasks; + +namespace GitHub.Services.WebApi.Utilities +{ + /// + /// From: http://blogs.msdn.com/b/pfxteam/archive/2012/02/12/10266988.aspx + /// + internal sealed class AsyncLock + { + public AsyncLock() + { + m_releaser = Task.FromResult((IDisposable)new Releaser(this)); + } + + public Task LockAsync(CancellationToken cancellationToken = default(CancellationToken)) + { + // Don't pass cancellationToken to the semaphore. If we can't acquire the semaphore immediately + // we'll still get the waitTask returned immediately (with IsCompleted = false) + // and then we'll end up in the else block where we add a continuation to the waitTask which will honor the cancellationToken + Task waitTask = m_semaphore.WaitAsync(); + + if (waitTask.IsCompleted) + { + return m_releaser; + } + else + { + return waitTask.ContinueWith( + (task, state) => (IDisposable)state, + m_releaser.Result, + cancellationToken, + TaskContinuationOptions.ExecuteSynchronously, + TaskScheduler.Default); + } + } + + private readonly SemaphoreSlim m_semaphore = new SemaphoreSlim(1, 1); + private readonly Task m_releaser; + + private sealed class Releaser : IDisposable + { + internal Releaser(AsyncLock toRelease) + { + m_toRelease = toRelease; + } + + public void Dispose() + { + m_toRelease.m_semaphore.Release(); + } + + private readonly AsyncLock m_toRelease; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Utilities/BaseSecuredObject.cs b/src/Sdk/WebApi/WebApi/Utilities/BaseSecuredObject.cs new file mode 100644 index 00000000000..c36135f48d6 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Utilities/BaseSecuredObject.cs @@ -0,0 +1,48 @@ +using System; +using System.Runtime.Serialization; + +namespace GitHub.Services.WebApi +{ + [DataContract] + public abstract class BaseSecuredObject : ISecuredObject + { + protected BaseSecuredObject() + { + } + + protected BaseSecuredObject(ISecuredObject securedObject) + { + if (securedObject != null) + { + this.m_namespaceId = securedObject.NamespaceId; + this.m_requiredPermissions = securedObject.RequiredPermissions; + this.m_token = securedObject.GetToken(); + } + } + + Guid ISecuredObject.NamespaceId + { + get + { + return m_namespaceId; + } + } + + int ISecuredObject.RequiredPermissions + { + get + { + return m_requiredPermissions; + } + } + + string ISecuredObject.GetToken() + { + return m_token; + } + + internal Guid m_namespaceId; + internal int m_requiredPermissions; + internal string m_token; + } +} diff --git a/src/Sdk/WebApi/WebApi/Utilities/ClientGeneratorAttributes.cs b/src/Sdk/WebApi/WebApi/Utilities/ClientGeneratorAttributes.cs new file mode 100644 index 00000000000..134a9e5e0f1 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Utilities/ClientGeneratorAttributes.cs @@ -0,0 +1,59 @@ +using System; + +namespace GitHub.Services.WebApi.Internal +{ + /// + /// GenClient (SwaggerGenerator) will ignore controller methods, parameters, and classes that have this attribute. 
+ /// + [AttributeUsage(AttributeTargets.Method | AttributeTargets.Parameter | AttributeTargets.Class | AttributeTargets.Enum, AllowMultiple = false)] + public sealed class ClientIgnoreAttribute : Attribute + { + public ClientIgnoreAttribute() + { + } + } + + /// + /// When a method or class has this attribute, we will only generate client methods for the specified languages. + /// + [AttributeUsage(AttributeTargets.Method | AttributeTargets.Class | AttributeTargets.Parameter, AllowMultiple = false)] + public sealed class ClientIncludeAttribute : Attribute + { + /// + /// + /// + /// A list of languages to generate methods for. + public ClientIncludeAttribute(RestClientLanguages languages) + { + Languages = languages; + } + + public RestClientLanguages Languages { get; private set; } + } + + [Flags] + public enum RestClientLanguages + { + All = ~0, + CSharp = 1, + Java = 2, + TypeScript = 4, + Nodejs = 8, + [Obsolete("DocMD has been replaced by Swagger generated REST Documentation.")] + DocMD = 16, + Swagger2 = 32, + Python = 64, + TypeScriptWebPlatform = 128 + } + + /// + /// Suppresses the default constant enum generation behavior in typescriptwebplatform clientgen. When using this attribute, and affected code generation will product a .ts file instead of a .d.ts file (non-constant enumerations should not be generated into .d.ts files). + /// + [AttributeUsage(AttributeTargets.Enum)] + public class ClientDontGenerateTypeScriptEnumAsConst : Attribute + { + public ClientDontGenerateTypeScriptEnumAsConst() + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/Utilities/ISecuredObject.cs b/src/Sdk/WebApi/WebApi/Utilities/ISecuredObject.cs new file mode 100644 index 00000000000..977542d61bf --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Utilities/ISecuredObject.cs @@ -0,0 +1,42 @@ +using System; +using System.ComponentModel; + +namespace GitHub.Services.WebApi +{ + /// + /// Any responses from public APIs must implement this interface. It is used to enforce that + /// the data being returned has been security checked. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public interface ISecuredObject + { + /// + /// The id of the namespace which secures this resource. + /// + Guid NamespaceId + { + get; + } + + /// + /// The security bit to demand. + /// + Int32 RequiredPermissions + { + get; + } + + /// + /// The token to secure this resource. + /// + String GetToken(); + } + + /// + /// Containers of ISecuredObjects should implement this interface. If you implement this interface, all + /// serializable properties must be of type ISecuredObject or IEnumerable of ISecuredObject. This will + /// be enforced using a roslyn analyzer. 
+ /// + [EditorBrowsable(EditorBrowsableState.Never)] + public interface ISecuredObjectContainer { } +} diff --git a/src/Sdk/WebApi/WebApi/Utilities/UserAgentUtility.cs b/src/Sdk/WebApi/WebApi/Utilities/UserAgentUtility.cs new file mode 100644 index 00000000000..b5fbec3d6f1 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Utilities/UserAgentUtility.cs @@ -0,0 +1,217 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Diagnostics; +using System.Globalization; +using System.IO; +using System.Net.Http.Headers; +using System.Reflection; +using System.Runtime.InteropServices; +using System.Text; +using GitHub.Services.Common; +using GitHub.Services.Common.Internal; +using Microsoft.Win32; + +namespace GitHub.Services.WebApi.Utilities.Internal +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class UserAgentUtility + { + private static Lazy> s_defaultRestUserAgent + = new Lazy>(ConstructDefaultRestUserAgent); + + public static List GetDefaultRestUserAgent() + { + return s_defaultRestUserAgent.Value; + } + + private static List ConstructDefaultRestUserAgent() + { +#if !NETSTANDARD + // Get just the exe file name without the path. + String exe; + try + { + exe = Path.GetFileName(NativeMethods.GetModuleFileName()); + } + catch (Exception e) + { + Trace.WriteLine("DefaultUserAgent: Unable to get exe. " + e.ToString()); + + // We weren't allowed to get the exe file name, so we go on. + exe = "unavailable"; + } + + Tuple skuInfo = null; + if (String.Equals(exe, "devenv.exe", StringComparison.OrdinalIgnoreCase)) + { + skuInfo = GetCurrentSkuInfo(); + } + + String app = String.Empty; + if (AppDomain.CurrentDomain != null) + { + app = (String)AppDomain.CurrentDomain.GetData(AdminConstants.ApplicationName); + } + + if (!String.IsNullOrEmpty(app)) + { + exe = String.Concat(exe, "[", app, "]"); + } +#endif + // Pick up the assembly version from this dll + String fileVersion = "unavailable"; + try + { + AssemblyFileVersionAttribute attr = typeof(UserAgentUtility).GetTypeInfo().Assembly.GetCustomAttribute(); + if (attr != null) + { + fileVersion = attr.Version; + } + } + catch (Exception e) + { + Trace.WriteLine("DefaultUserAgent: Unable to get fileVersion: " + e.ToString()); + } + + +#if !NETSTANDARD + + Debug.Assert(fileVersion.StartsWith("16", StringComparison.OrdinalIgnoreCase), + "The SKU numbers here are only meant to work with Dev16. For later versions the SKU numbers mapped in the GetSkuNumber method need to be updated."); + StringBuilder builder = new StringBuilder(); + builder.Append("("); + builder.Append(exe); + if (skuInfo != null) + { + builder.Append(", "); + builder.Append(skuInfo.Item1); + builder.Append(", SKU:"); + builder.Append(skuInfo.Item2.ToString(CultureInfo.InvariantCulture)); + } + builder.Append(")"); + + String commentValue = builder.ToString(); +#else + String commentValue = string.Format("(NetStandard; {0})", RuntimeInformation.OSDescription.Replace('(', '[').Replace(')', ']').Trim()); +#endif + return new List { + new ProductInfoHeaderValue("VSServices", fileVersion), + new ProductInfoHeaderValue(commentValue) }; + } + + +#if !NETSTANDARD + private static Lazy s_defaultSoapUserAgent = new Lazy(ConstructDefaultSoapUserAgent); + + public static String GetDefaultSoapUserAgent() + { + return s_defaultSoapUserAgent.Value; + } + + private static string ConstructDefaultSoapUserAgent() + { + // Get just the exe file name without the path. 
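+            // The user agent assembled below has the shape
+            //     Team Foundation (<exe>[<app>], <fileVersion>, <skuName>, SKU:<skuNumber>)
+            // where the [<app>] suffix and the SKU segment are included only when available.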
+ String exe; + try + { + exe = Path.GetFileName(NativeMethods.GetModuleFileName()); + } + catch (Exception e) + { + Trace.WriteLine("DefaultUserAgent: Unable to get exe: " + e.ToString()); + + // We weren't allowed to get the exe file name, so we go on. + exe = "unavailable"; + } + + Tuple skuInfo = null; + if (String.Equals(exe, "devenv.exe", StringComparison.OrdinalIgnoreCase)) + { + skuInfo = GetCurrentSkuInfo(); + } + + String app = String.Empty; + if (AppDomain.CurrentDomain != null) + { + app = (String)AppDomain.CurrentDomain.GetData("ApplicationName"); + } + + if (!String.IsNullOrEmpty(app)) + { + exe = String.Concat(exe, "[", app, "]"); + } + + // Pick up the assembly version from the current dll. + String fileVersion = String.Empty; + try + { + Object[] attrs = typeof(UserAgentUtility).Assembly.GetCustomAttributes(false); + foreach (Object attr in attrs) + { + if (attr is AssemblyFileVersionAttribute) + { + fileVersion = ((AssemblyFileVersionAttribute)attr).Version; + break; + } + } + } + catch (Exception e) + { + Trace.WriteLine("DefaultUserAgent: Unable to get fileVersion: " + e.ToString()); + + // We weren't allowed to get the version info, so we go on. + fileVersion = "unavailable"; + } + + StringBuilder userAgent = new StringBuilder(); + userAgent.Append("Team Foundation ("); + userAgent.Append(exe); + userAgent.Append(", "); + userAgent.Append(fileVersion); + if (skuInfo != null) + { + userAgent.Append(", "); + userAgent.Append(skuInfo.Item1); + userAgent.Append(", SKU:"); + userAgent.Append(skuInfo.Item2.ToString(CultureInfo.InvariantCulture)); + } + userAgent.Append(")"); + return userAgent.ToString(); + } + + + private static Tuple GetCurrentSkuInfo() + { + string vsSkuEdition = Environment.GetEnvironmentVariable("VSSKUEDITION"); + if (!string.IsNullOrEmpty(vsSkuEdition)) + { + Tuple skuInfo; + if (s_dev16SkuToAgentStringMap.TryGetValue(vsSkuEdition, out skuInfo)) + { + return skuInfo; + } + else + { + Debug.Fail("Unrecognized value for VSSKUEDITION: '{0}'. This value needs to be added to the s_dev16SkuToAgentStringMap.", vsSkuEdition); + } + } + + return new Tuple(ClientSkuNames.Dev16.Other, ClientSkuNumbers.Dev16Other); + } + + /// + /// The key is the SKU name provided by VSSKUEDITION env variable. The value is a tuple. Item1 is a string for the SKU Name to put in the User Agent string, and Item2 is an int for the SkuCode. + /// + private static readonly Dictionary> s_dev16SkuToAgentStringMap = new Dictionary>(StringComparer.OrdinalIgnoreCase) + { + { "Enterprise", new Tuple(ClientSkuNames.Dev16.Enterprise, ClientSkuNumbers.Dev16Enterprise) }, + { "Professional", new Tuple(ClientSkuNames.Dev16.Pro, ClientSkuNumbers.Dev16Pro) }, + { "Community", new Tuple(ClientSkuNames.Dev16.Community, ClientSkuNumbers.Dev16Community) }, + { "V3|UNKNOWN", new Tuple(ClientSkuNames.Dev16.TE, ClientSkuNumbers.Dev16TeamExplorer) }, + { "V4|UNKNOWN", new Tuple(ClientSkuNames.Dev16.Sql, ClientSkuNumbers.Dev16Sql) }, // future release as of 4/25/2017. + { "IntShell", new Tuple(ClientSkuNames.Dev16.IntShell, ClientSkuNumbers.Dev16IntShell) } // future release as of 4/25/2017. This key may change. 
+ }; +#endif + } +} diff --git a/src/Sdk/WebApi/WebApi/Utilities/XmlSerializableDataContractExtensions.cs b/src/Sdk/WebApi/WebApi/Utilities/XmlSerializableDataContractExtensions.cs new file mode 100644 index 00000000000..06cea3ca908 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/Utilities/XmlSerializableDataContractExtensions.cs @@ -0,0 +1,355 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Reflection; +using System.Runtime.Serialization; +using System.Xml; +using System.Xml.Serialization; +using GitHub.Services.Common; +using GitHub.Services.Graph.Client; + +namespace GitHub.Services.WebApi.Xml +{ + [AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Enum, Inherited = true, AllowMultiple = false)] + public class XmlSerializableDataContractAttribute : Attribute + { + public XmlSerializableDataContractAttribute() { } + + public bool EnableCamelCaseNameCompat { get; set; } + } + + /// + /// These extensions are intended to be used alongside the interface + /// to allow classes to leverage some of the functionality of DataContractSerializer, + /// such as serialization of publicly immutable properties, while also supporting the conventional . + /// + public static class XmlSerializableDataContractExtensions + { + // This method has a class constraint because we can't use reflection to mutate the properties of a struct without boxing + public static void ReadDataMemberXml(this XmlReader reader, T destination) where T : class + { + ArgumentUtility.CheckForNull(reader, nameof(reader)); + + var isEmptyElement = reader.IsEmptyElement; + + reader.ReadStartElement(); + + if (isEmptyElement) + { + return; + } + + var serializableProperties = GetSerializableProperties(destination.GetType().GetTypeInfo()).MappedByName; + + while (reader.IsStartElement()) + { + if (!serializableProperties.TryGetValue(reader.LocalName, out var property)) + { + reader.ReadOuterXml(); + continue; + } + + isEmptyElement = reader.IsEmptyElement; + + object propertyValue; + + if (destination is GraphSubjectBase && property.SerializedName == nameof(GraphSubjectBase.Descriptor)) + { + var propertySerializer = GetSerializer(reader.NamespaceURI, reader.LocalName, typeof(SubjectDescriptor)); + propertyValue = propertySerializer.Deserialize(reader.ReadSubtree()).ToString(); + } + else + { + var propertySerializer = GetSerializer(reader.NamespaceURI, reader.LocalName, property.SerializedType); + propertyValue = propertySerializer.Deserialize(reader.ReadSubtree()); + } + + property.SetValue(destination, propertyValue); + + if (isEmptyElement) + { + reader.ReadOuterXml(); + } + else + { + reader.ReadEndElement(); + } + } + + reader.ReadEndElement(); + } + + public static void WriteDataMemberXml(this XmlWriter writer, object source) + { + ArgumentUtility.CheckForNull(writer, nameof(writer)); + + var type = source.GetType().GetTypeInfo(); + var shouldWriteNamespace = writer.Settings == null; + var rootNamespace = shouldWriteNamespace ? 
GetNamespace(type) : null; + var serializableProperties = GetSerializableProperties(type).EnumeratedInOrder; + foreach (var property in serializableProperties) + { + if (!property.ShouldSerialize(source)) + { + continue; + } + + var propertyValue = property.GetValue(source); + if (property.IsIgnorableDefaultValue(propertyValue)) + { + continue; + } + + var propertySerializer = GetSerializer(rootNamespace, property.SerializedName, property.SerializedType); + propertySerializer.Serialize(writer, propertyValue); + + if (!string.IsNullOrEmpty(property.SerializedNameForCamelCaseCompat)) + { + propertySerializer = GetSerializer(rootNamespace, property.SerializedNameForCamelCaseCompat, property.SerializedType); + propertySerializer.Serialize(writer, propertyValue); + } + } + } + + private static string GetNamespace(TypeInfo type) + { + if (!NamespacesByType.TryGetValue(type, out string outputNamespace)) + { + outputNamespace = $"http://schemas.datacontract.org/2004/07/{type.Namespace}"; + NamespacesByType.TryAdd(type, outputNamespace); + } + + return outputNamespace; + } + + private static SerializableProperties GetSerializableProperties(TypeInfo type) + { + if (SerializablePropertiesByType.TryGetValue(type, out var properties)) + { + return properties; + } + + var dataContract = type.GetCustomAttribute(typeof(XmlSerializableDataContractAttribute)) as XmlSerializableDataContractAttribute; + var enableCamelCaseNameCompat = dataContract == null ? false : dataContract.EnableCamelCaseNameCompat; + + var declaredProperties = new List(); + + foreach (var declaredProperty in type.DeclaredProperties) + { + if (declaredProperty.GetCustomAttribute(typeof(XmlIgnoreAttribute)) != null) + { + continue; + } + + if (declaredProperty.SetMethod == null) + { + continue; + } + + var dataMember = declaredProperty.GetCustomAttribute(typeof(DataMemberAttribute)) as DataMemberAttribute; + if (dataMember == null) + { + continue; + } + + var shouldSerializeMethodName = string.Concat("ShouldSerialize", declaredProperty.Name); + var shouldSerializeMethod = type.GetDeclaredMethod(shouldSerializeMethodName); + + declaredProperties.Add(new SerializableProperty(declaredProperty, dataMember, shouldSerializeMethod, enableCamelCaseNameCompat)); + } + + var inheritedProperties = Enumerable.Empty(); + if (type.BaseType != typeof(object)) + { + inheritedProperties = GetSerializableProperties(type.BaseType.GetTypeInfo()).EnumeratedInOrder; + } + + var serializableProperties = new SerializableProperties(declaredProperties, inheritedProperties); + + return SerializablePropertiesByType.GetOrAdd(type, serializableProperties); + } + + private static XmlSerializer GetSerializer(string rootNamespace, string elementName, Type elementType) + { + var serializerKey = new SerializerKey(rootNamespace, elementName, elementType); + return Serializers.GetOrAdd(serializerKey, _ => + { + var rootAttribute = new XmlRootAttribute(elementName) { Namespace = rootNamespace }; + return new XmlSerializer(elementType, rootAttribute); + }); + } + + private static ConcurrentDictionary SerializablePropertiesByType + = new ConcurrentDictionary(); + + private static ConcurrentDictionary NamespacesByType + = new ConcurrentDictionary(); + + private static ConcurrentDictionary Serializers + = new ConcurrentDictionary(); + + /// + /// Creates a HashSet based on the elements in , using transformation + /// function . 
+ /// + private static HashSet ToHashSet( + this IEnumerable source, + Func selector) + { + return new HashSet(source.Select(selector)); + } + + private class SerializableProperties + { + public IReadOnlyDictionary MappedByName { get; } + + public IReadOnlyList EnumeratedInOrder { get; } + + public SerializableProperties(IEnumerable declaredProperties, IEnumerable inheritedProperties) + { + var declaredPropertyNames = declaredProperties.ToHashSet(property => property.SerializedName); + + // To maintain consistency with the DataContractSerializer, property ordering is determined according to the following rules: + // 1. If a data contract type is a part of an inheritance hierarchy, data members of its base types are always first in the order. + // 2. Next in order are the current type’s data members that do not have the Order property of the DataMemberAttribute attribute set, in alphabetical order. + // https://docs.microsoft.com/en-us/dotnet/framework/wcf/feature-details/data-member-order + EnumeratedInOrder = inheritedProperties + // Subclass properties should hide inherited properties with the same name + .Where(inheritedProperty => !declaredPropertyNames.Contains(inheritedProperty.SerializedName)) + .Concat(declaredProperties.OrderBy(property => property.SerializedName)) + .ToList(); + + var propertiesMappedByName = new Dictionary(); + foreach (var property in EnumeratedInOrder) + { + propertiesMappedByName.Add(property.SerializedName, property); + if (property.SerializedNameForCamelCaseCompat != null) + { + propertiesMappedByName.TryAdd(property.SerializedNameForCamelCaseCompat, property); + } + } + MappedByName = propertiesMappedByName; + } + + private Dictionary PropertiesDictionary { get; } + } + + [DebuggerDisplay("Name={SerializedName} Type={SerializedType}")] + private class SerializableProperty + { + public Type SerializedType => Property.PropertyType; + + public string SerializedName { get; } + + public string SerializedNameForCamelCaseCompat { get; } + + public SerializableProperty(PropertyInfo property, DataMemberAttribute dataMember, MethodInfo shouldSerializeMethod, bool enableCamelCaseNameCompat) + { + Property = property; + DataMember = dataMember; + ShouldSerializeMethod = shouldSerializeMethod; + + SerializedName = DataMember?.Name ?? Property.Name; + SerializedNameForCamelCaseCompat = ComputeSerializedNameForCameCaseCompat(enableCamelCaseNameCompat); + } + + public object GetValue(object @object) => Property.GetValue(@object); + + public void SetValue(object @object, object value) => Property.SetValue(@object, value); + + public bool ShouldSerialize(object @object) + => ShouldSerializeMethod == null ? 
true : (bool)ShouldSerializeMethod.Invoke(@object, new object[] { }); + + public bool IsIgnorableDefaultValue(object value) + { + if (DataMember.EmitDefaultValue) + { + return false; + } + + var serializedType = SerializedType; + if (serializedType.GetTypeInfo().IsValueType) + { + var defaultValue = DefaultValuesByType.GetOrAdd(serializedType, key => Activator.CreateInstance(key)); + return Equals(value, defaultValue); + } + + return value == null; + } + + private string ComputeSerializedNameForCameCaseCompat(bool enableCamelCaseNameCompat) + { + if (!enableCamelCaseNameCompat) + { + return null; + } + + var upperCamelCaseName = ConvertToUpperCamelCase(SerializedName); + + if (string.Equals(upperCamelCaseName, SerializedName)) + { + return null; + } + + return upperCamelCaseName; + } + + private static string ConvertToUpperCamelCase(string input) + { + return string.Concat(char.ToUpperInvariant(input[0]), input.Substring(1)); + } + + private PropertyInfo Property { get; } + private DataMemberAttribute DataMember { get; } + private MethodInfo ShouldSerializeMethod { get; } + private static ConcurrentDictionary DefaultValuesByType = new ConcurrentDictionary(); + } + + private struct SerializerKey + { + public string RootNamespace { get; } + + public string ElementName { get; } + + public Type ElementType { get; } + + public SerializerKey(string rootNamespace, string elementName, Type elementType) + { + // root namespace can be null, but element name and type must be nonnull + ArgumentUtility.CheckForNull(elementName, nameof(elementName)); + ArgumentUtility.CheckForNull(elementType, nameof(elementType)); + RootNamespace = rootNamespace; + ElementName = elementName; + ElementType = elementType; + } + + public override bool Equals(object other) + { + if (other is SerializerKey) + { + var otherKey = (SerializerKey)other; + return RootNamespace == otherKey.RootNamespace + && ElementName == otherKey.ElementName + && ElementType == otherKey.ElementType; + } + + return false; + } + + public override int GetHashCode() + { + int hashCode = 7443; // "large" prime to start the seed + + // Bitshifting and subtracting once is an efficient way to multiply by our second "large" prime, 0x7ffff = 524287 + hashCode = (hashCode << 19) - hashCode + (RootNamespace?.GetHashCode() ?? 
0); + hashCode = (hashCode << 19) - hashCode + ElementName.GetHashCode(); + hashCode = (hashCode << 19) - hashCode + ElementType.GetHashCode(); + + return hashCode; + } + } + + } +} diff --git a/src/Sdk/WebApi/WebApi/VssApiResourceLocation.cs b/src/Sdk/WebApi/WebApi/VssApiResourceLocation.cs new file mode 100644 index 00000000000..3a3628675eb --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssApiResourceLocation.cs @@ -0,0 +1,197 @@ +using System; +using System.Runtime.Serialization; +using System.ComponentModel; +using GitHub.Services.Location; +using GitHub.Services.Common; + +namespace GitHub.Services.WebApi +{ + /// + /// Information about the location of a REST API resource + /// + [DataContract] + public class ApiResourceLocation : IEquatable, ISecuredObject + { + /// + /// Unique Identifier for this location + /// + [DataMember] + public Guid Id { get; set; } + + /// + /// Area name for this resource + /// + [DataMember] + public String Area { get; set; } + + /// + /// Resource name + /// + [DataMember] + public String ResourceName { get; set; } + + /// + /// This location's route template (templated relative path) + /// + [DataMember] + public String RouteTemplate { get; set; } + + /// + /// The name of the route (not serialized to the client) + /// + public String RouteName { get; set; } + + /// + /// The current resource version supported by this resource location + /// + [DataMember] + public Int32 ResourceVersion { get; set; } + + /// + /// Minimum api version that this resource supports + /// + public Version MinVersion { get; set; } + + /// + /// Minimum api version that this resource supports + /// + [DataMember(Name = "MinVersion")] + public String MinVersionString + { + get + { + return MinVersion.ToString(2); + } + private set + { + if (String.IsNullOrEmpty(value)) + { + MinVersion = new Version(1, 0); + } + else + { + MinVersion = new Version(value); + } + } + } + + /// + /// Maximum api version that this resource supports (current server version for this resource) + /// + public Version MaxVersion { get; set; } + + /// + /// Maximum api version that this resource supports (current server version for this resource) + /// + [DataMember(Name = "MaxVersion")] + public String MaxVersionString + { + get + { + return MaxVersion.ToString(2); + } + private set + { + if (String.IsNullOrEmpty(value)) + { + MaxVersion = new Version(1, 0); + } + else + { + MaxVersion = new Version(value); + } + } + } + + /// + /// The latest version of this resource location that is in "Release" (non-preview) mode + /// + public Version ReleasedVersion { get; set; } + + /// + /// The latest version of this resource location that is in "Release" (non-preview) mode + /// + [DataMember(Name = "ReleasedVersion")] + public String ReleasedVersionString + { + get + { + return ReleasedVersion.ToString(2); + } + private set + { + if (String.IsNullOrEmpty(value)) + { + ReleasedVersion = new Version(1, 0); + } + else + { + ReleasedVersion = new Version(value); + } + } + } + + /// + /// + /// + /// + public ServiceDefinition ToServiceDefinition(InheritLevel level = InheritLevel.None) + { + return new ServiceDefinition() + { + Identifier = this.Id, + ServiceType = this.Area, + DisplayName = this.ResourceName, + Description = "Resource Location", + RelativePath = this.RouteTemplate, + ResourceVersion = this.ResourceVersion, + MinVersion = this.MinVersion, + MaxVersion = this.MaxVersion, + ReleasedVersion = this.ReleasedVersion, + ToolId = "Framework", // needed for back compat for old soap clients + InheritLevel = 
level + }; + } + + /// + /// + /// + /// + /// + public static ApiResourceLocation FromServiceDefinition(ServiceDefinition definition) + { + return new ApiResourceLocation() + { + Id = definition.Identifier, + Area = definition.ServiceType, + ResourceName = definition.DisplayName, + RouteTemplate = definition.RelativePath, + ResourceVersion = definition.ResourceVersion, + MinVersion = definition.MinVersion, + MaxVersion = definition.MaxVersion, + ReleasedVersion = definition.ReleasedVersion, + }; + } + + public bool Equals(ApiResourceLocation other) + { + return (Guid.Equals(Id, other.Id) && + string.Equals(Area, other.Area) && + string.Equals(ResourceName, other.ResourceName) && + string.Equals(RouteTemplate, other.RouteTemplate) && + string.Equals(RouteName, other.RouteName) && + Version.Equals(ResourceVersion, other.ResourceVersion) && + Version.Equals(MinVersion, other.MinVersion) && + Version.Equals(MaxVersion, other.MaxVersion) && + Version.Equals(ReleasedVersion, other.ReleasedVersion)); + } + + #region ISecuredObject + Guid ISecuredObject.NamespaceId => LocationSecurityConstants.NamespaceId; + + int ISecuredObject.RequiredPermissions => LocationSecurityConstants.Read; + + string ISecuredObject.GetToken() => LocationSecurityConstants.NamespaceRootToken; + #endregion + } +} diff --git a/src/Sdk/WebApi/WebApi/VssApiResourceLocationCollection.cs b/src/Sdk/WebApi/WebApi/VssApiResourceLocationCollection.cs new file mode 100644 index 00000000000..9f4439c7ddf --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssApiResourceLocationCollection.cs @@ -0,0 +1,143 @@ +using System; +using System.Collections.Generic; +using System.Linq; + +namespace GitHub.Services.WebApi +{ + /// + /// Container for API resource locations + /// + public class ApiResourceLocationCollection + { + private Dictionary m_locationsById = new Dictionary(); + private Dictionary> m_locationsByKey = new Dictionary>(); + + /// + /// Add a new API resource location + /// + /// API resource location to add + public void AddResourceLocation(ApiResourceLocation location) + { + ApiResourceLocation existingLocation; + if (m_locationsById.TryGetValue(location.Id, out existingLocation)) + { + if (!location.Equals(existingLocation)) // unit tests will register same resources multiple times, so only throw if the ApiResourceLocation doesn't match what is already cached. + { + throw new VssApiResourceDuplicateIdException(location.Id); + } + } + + m_locationsById[location.Id] = location; + + List locationsByKey; + String locationCacheKey = GetLocationCacheKey(location.Area, location.ResourceName); + if (!m_locationsByKey.TryGetValue(locationCacheKey, out locationsByKey)) + { + locationsByKey = new List(); + m_locationsByKey.Add(locationCacheKey, locationsByKey); + } + + if (!locationsByKey.Any(x => x.Id.Equals(location.Id))) + { + locationsByKey.Add(location); + } + } + + /// + /// Add new API resource locations + /// + /// API resource locations to add + public void AddResourceLocations(IEnumerable locations) + { + if (locations != null) + { + foreach (ApiResourceLocation location in locations) + { + AddResourceLocation(location); + } + } + } + + private String GetLocationCacheKey(String area, String resourceName) + { + if (area == null) + { + area = String.Empty; + } + if (resourceName == null) + { + resourceName = String.Empty; + } + + return String.Format("{0}:{1}", area.ToLower(), resourceName.ToLower()); + } + + /// + /// Get an API resource location by location id. Returns null if not found. 
+ /// + /// Id of the registered resource location + /// ApiResourceLocation or null if not found + public ApiResourceLocation TryGetLocationById(Guid locationId) + { + ApiResourceLocation location; + m_locationsById.TryGetValue(locationId, out location); + return location; + } + + /// + /// Get an API resource location by location id. Throws if not found. + /// + /// Id of the registered resource location + /// ApiResourceLocation or null if not found + public ApiResourceLocation GetLocationById(Guid locationId) + { + ApiResourceLocation location = TryGetLocationById(locationId); + if (location == null) + { + throw new VssResourceNotFoundException(locationId); + } + return location; + } + + /// + /// Get all API resource locations + /// + /// + public IEnumerable GetAllLocations() + { + return m_locationsById.Values; + } + + /// + /// Get all API resource locations under a given area + /// + /// Resource area name + /// + public IEnumerable GetAreaLocations(String area) + { + return m_locationsById.Values.Where(l => String.Equals(area, l.Area, StringComparison.OrdinalIgnoreCase)); + } + + /// + /// Get all API resource locations for a given resource. + /// + /// Note: There are multiple locations for a given resource when multiple routes are registered for that resource + /// Resource area name + /// Resource name + /// + public IEnumerable GetResourceLocations(String area, String resourceName) + { + List locationsByKey; + String locationCacheKey = GetLocationCacheKey(area, resourceName); + + if (m_locationsByKey.TryGetValue(locationCacheKey, out locationsByKey)) + { + return locationsByKey; + } + else + { + return Enumerable.Empty(); + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/VssApiResourceVersion.cs b/src/Sdk/WebApi/WebApi/VssApiResourceVersion.cs new file mode 100644 index 00000000000..b23236f7850 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssApiResourceVersion.cs @@ -0,0 +1,196 @@ +using System; +using System.Globalization; +using System.Runtime.Serialization; +using System.Text; +using GitHub.Services.Common; + +namespace GitHub.Services.WebApi +{ + /// + /// Represents version information for a REST Api resource + /// + [DataContract] + public class ApiResourceVersion + { + private const String c_PreviewStageName = "preview"; + + /// + /// Construct a new API Version info + /// + /// Public API version + /// Resource version + public ApiResourceVersion(double apiVersion, int resourceVersion = 0) + : this(new Version(apiVersion.ToString("0.0", CultureInfo.InvariantCulture)), resourceVersion) + { + } + + /// + /// Construct a new API resource Version + /// + public ApiResourceVersion() + : this(1.0) + { + } + + /// + /// Construct a new API Version info + /// + /// Public API version + /// Resource version + public ApiResourceVersion(Version apiVersion, int resourceVersion = 0) + { + ArgumentUtility.CheckForNull(apiVersion, "apiVersion"); + + ApiVersion = apiVersion; + ResourceVersion = resourceVersion; + + if (resourceVersion > 0) + { + IsPreview = true; + } + } + + /// + /// Construct a new API Version info from the given version string + /// + /// Version string in the form: + /// {ApiMajor}.{ApiMinor}[-{stage}[.{resourceVersion}]] + /// + /// For example: 1.0 or 2.0-preview or 2.0-preview.3 + public ApiResourceVersion(String apiResourceVersionString) + { + this.FromVersionString(apiResourceVersionString); + } + + /// + /// Public API version. This is the version that the public sees and is used for a large + /// group of services (e.g. 
the TFS 1.0 API) + /// + public Version ApiVersion { get; private set; } + + /// + /// String representation of the Public API version. This is the version that the public sees and is used + /// for a large group of services (e.g. the TFS 1.0 API) + /// + [DataMember(Name = "ApiVersion")] + public String ApiVersionString + { + get + { + return ApiVersion.ToString(2); + } + private set + { + if (String.IsNullOrEmpty(value)) + { + ApiVersion = new Version(1, 0); + } + else + { + ApiVersion = new Version(value); + } + } + } + + /// + /// Internal resource version. This is defined per-resource and is used to support + /// build-to-build compatibility of API changes within a given (in-preview) public api version. + /// For example, within the TFS 1.0 API release cycle, while it is still in preview, a resource's + /// data structure may be changed. This resource can be versioned such that older clients will + /// still work (requests will be sent to the older version) and new/upgraded clients will + /// talk to the new version of the resource. + /// + [DataMember] + public int ResourceVersion { get; set; } + + /// + /// Is the public API version in preview + /// + [DataMember(EmitDefaultValue = false)] + public bool IsPreview { get; set; } + + /// + /// Returns the version string in the form: + /// {ApiMajor}.{ApiMinor}[-{stage}[.{resourceVersion}]] + /// + /// + public override string ToString() + { + StringBuilder sbVersion = new StringBuilder(ApiVersion.ToString(2)); + if (IsPreview) + { + sbVersion.Append('-'); + sbVersion.Append(c_PreviewStageName); + + if (ResourceVersion > 0) + { + sbVersion.Append('.'); + sbVersion.Append(ResourceVersion); + } + } + return sbVersion.ToString(); + } + + private void FromVersionString(String apiVersionString) + { + if (String.IsNullOrEmpty(apiVersionString)) + { + throw new VssInvalidApiResourceVersionException(apiVersionString); + } + + // Check for a stage/resourceVersion string + int dashIndex = apiVersionString.IndexOf('-'); + if (dashIndex >= 0) + { + String stageName; + + // Check for a '.' 
which separate stage from resource version + int dotIndex = apiVersionString.IndexOf('.', dashIndex); + if (dotIndex > 0) + { + stageName = apiVersionString.Substring(dashIndex + 1, dotIndex - dashIndex - 1); + + int resourceVersion; + String resourceVersionString = apiVersionString.Substring(dotIndex + 1); + if (!int.TryParse(resourceVersionString, out resourceVersion)) + { + throw new VssInvalidApiResourceVersionException(apiVersionString); + } + else + { + this.ResourceVersion = resourceVersion; + } + } + else + { + stageName = apiVersionString.Substring(dashIndex + 1); + } + + // Check for supported stage names + if (String.Equals(stageName, c_PreviewStageName, StringComparison.OrdinalIgnoreCase)) + { + IsPreview = true; + } + else + { + throw new VssInvalidApiResourceVersionException(apiVersionString); + } + + // Api version is the string before the dash + apiVersionString = apiVersionString.Substring(0, dashIndex); + } + + // Trim a leading "v" for version + apiVersionString = apiVersionString.TrimStart('v'); + + double apiVersionValue; + if (!double.TryParse(apiVersionString, NumberStyles.Any, NumberFormatInfo.InvariantInfo, out apiVersionValue)) + { + throw new VssInvalidApiResourceVersionException(apiVersionString); + } + + // Store the api version + this.ApiVersion = new Version(apiVersionValue.ToString("0.0", CultureInfo.InvariantCulture)); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/VssApiResourceVersionExtensions.cs b/src/Sdk/WebApi/WebApi/VssApiResourceVersionExtensions.cs new file mode 100644 index 00000000000..92a9777a4cf --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssApiResourceVersionExtensions.cs @@ -0,0 +1,93 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Linq; +using System.Net.Http.Headers; + +namespace GitHub.Services.WebApi +{ + /// + /// Extension methods for getting/setting API resource version information from requests and to responses + /// + public static class ApiResourceVersionExtensions + { + public const String c_apiVersionHeaderKey = "api-version"; + internal const String c_legacyResourceVersionHeaderKey = "res-version"; + + /// + /// Generate version key/value pairs to use in the header, replacing any existing api-version value. + /// + /// Header values to populate + /// Version to supply in the header + public static void AddApiResourceVersionValues(this ICollection headerValues, ApiResourceVersion version) + { + AddApiResourceVersionValues(headerValues, version, replaceExisting: true, useLegacyFormat: false); + } + + /// + /// Generate version key/value pairs to use in the header + /// + /// Header values to populate + /// Version to supply in the header + /// If true, replace an existing header with the specified version. Otherwise no-op in that case + public static void AddApiResourceVersionValues(this ICollection headerValues, ApiResourceVersion version, Boolean replaceExisting) + { + AddApiResourceVersionValues(headerValues, version, replaceExisting, useLegacyFormat: false); + } + + /// + /// Generate version key/value pairs to use in the header + /// + /// Header values to populate + /// Version to supply in the header + /// If true, replace an existing header with the specified version. 
Otherwise no-op in that case + /// If true, use the legacy format of api-version combined with res-version + internal static void AddApiResourceVersionValues(this ICollection headerValues, ApiResourceVersion version, Boolean replaceExisting, Boolean useLegacyFormat) + { + String apiVersionHeaderValue = null; + String resVersionHeaderValue = null; + + if (useLegacyFormat) + { + apiVersionHeaderValue = version.ApiVersionString; + if (version.ResourceVersion > 0) + { + resVersionHeaderValue = version.ResourceVersion.ToString(); + } + } + else + { + apiVersionHeaderValue = version.ToString(); + } + + NameValueHeaderValue existingHeader = headerValues.FirstOrDefault(h => String.Equals(c_apiVersionHeaderKey, h.Name)); + if (existingHeader != null) + { + if (replaceExisting) + { + existingHeader.Value = apiVersionHeaderValue; + if (!String.IsNullOrEmpty(resVersionHeaderValue)) + { + NameValueHeaderValue existingResHeader = headerValues.FirstOrDefault(h => String.Equals(c_legacyResourceVersionHeaderKey, h.Name)); + if (existingResHeader != null) + { + existingResHeader.Value = resVersionHeaderValue; + } + else + { + headerValues.Add(new NameValueHeaderValue(c_legacyResourceVersionHeaderKey, resVersionHeaderValue)); + } + } + } + } + else + { + headerValues.Add(new NameValueHeaderValue(c_apiVersionHeaderKey, apiVersionHeaderValue)); + if (!String.IsNullOrEmpty(resVersionHeaderValue)) + { + headerValues.Add(new NameValueHeaderValue(c_legacyResourceVersionHeaderKey, resVersionHeaderValue)); + } + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/VssCamelCasePropertyNamesContractResolver.cs b/src/Sdk/WebApi/WebApi/VssCamelCasePropertyNamesContractResolver.cs new file mode 100644 index 00000000000..84e7a6c0f88 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssCamelCasePropertyNamesContractResolver.cs @@ -0,0 +1,39 @@ +using System; +using Newtonsoft.Json.Serialization; + +namespace GitHub.Services.WebApi +{ + internal class VssCamelCasePropertyNamesContractResolver : CamelCasePropertyNamesContractResolver + { + protected override JsonDictionaryContract CreateDictionaryContract(Type type) + { + // We need to preserve case for keys in the PropertiesCollection + JsonDictionaryContract contract = base.CreateDictionaryContract(type); + contract.DictionaryKeyResolver = (name) => name; + return contract; + } + } + + internal class VssCamelCasePropertyNamesPreserveEnumsContractResolver : CamelCasePropertyNamesContractResolver + { + protected override JsonDictionaryContract CreateDictionaryContract(Type type) + { + // We need to preserve case for keys in the PropertiesCollection and optionally use integer values for enum keys + JsonDictionaryContract contract = base.CreateDictionaryContract(type); + + Type keyType = contract.DictionaryKeyType; + Boolean isEnumKey = keyType != null ? 
keyType.IsEnum : false; + + if (isEnumKey) + { + contract.DictionaryKeyResolver = (name) => ((int)Enum.Parse(keyType, name)).ToString(); + } + else + { + contract.DictionaryKeyResolver = (name) => name; + } + + return contract; + } + } +} diff --git a/src/Sdk/WebApi/WebApi/VssClientHttpRequestSettings.cs b/src/Sdk/WebApi/WebApi/VssClientHttpRequestSettings.cs new file mode 100644 index 00000000000..b7b741667c9 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssClientHttpRequestSettings.cs @@ -0,0 +1,161 @@ +using System; +using System.ComponentModel; +using System.Diagnostics; +using System.Globalization; +using System.Security; +using GitHub.Services.Common; +using GitHub.Services.WebApi.Utilities.Internal; +using Microsoft.Win32; + +namespace GitHub.Services.WebApi +{ + /// + /// Provides access to common settings which control the behavior of requests for a VssHttpClient instance. + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public sealed class VssClientHttpRequestSettings : VssHttpRequestSettings + { + public VssClientHttpRequestSettings() + : base() + { + } + + private VssClientHttpRequestSettings(VssClientHttpRequestSettings settingsToBeCloned) + : base(settingsToBeCloned) + { + } + + /// + /// Gets the default request settings. + /// + public static VssClientHttpRequestSettings Default => s_defaultSettings.Value; + + public VssClientHttpRequestSettings Clone() + { + return new VssClientHttpRequestSettings(this); + } + + /// + /// Reload the defaults from the Registry. + /// + internal static void ResetDefaultSettings() + { + s_defaultSettings = new Lazy(ConstructDefaultSettings); + } + + /// + /// Creates an instance of the default request settings. + /// + /// The default request settings + private static VssClientHttpRequestSettings ConstructDefaultSettings() + { + // Set up reasonable defaults in case the registry keys are not present + var settings = new VssClientHttpRequestSettings(); + +#if !NETSTANDARD + try + { + // Prefer HKCU over HKLM. 
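+                // Resolution order for the "Services\RequestSettings" key probed below:
+                //   1. the per-user registry root (HKCU), if present;
+                //   2. otherwise the application registry root (HKLM);
+                //   3. otherwise the key is created under HKCU and seeded with a generated AgentId.
+                // Recognized values: BypassProxyOnLocal, EnableCompression, DefaultTimeout (DWORD,
+                // milliseconds) and AgentId.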
+ RegistryKey key = null; + + // Default store: HKCU + using (RegistryKey userRoot = VssClientEnvironment.TryGetUserRegistryRoot()) + { + if (userRoot != null) + { + key = userRoot.OpenSubKey(c_settingsKey); + } + } + + // Alternate store: HKLM + if (key == null) + { + using (RegistryKey applicationRoot = VssClientEnvironment.TryGetApplicationRegistryRoot()) + { + if (applicationRoot != null) + { + key = applicationRoot.OpenSubKey(c_settingsKey); + } + } + } + + // If no store, create the default store + if (key == null) + { + using (RegistryKey userRoot = VssClientEnvironment.TryGetUserRegistryRoot()) + { + if (userRoot != null) + { + key = userRoot.CreateSubKey(c_settingsKey); + } + } + + // Write defaults + String defaultAgentId = String.Format(CultureInfo.InvariantCulture, "VSS: {0}", Guid.NewGuid().ToString("D")); + key.SetValue(c_settingsAgentId, defaultAgentId); + } + + if (key != null) + { + using (key) + { + Boolean boolValue; + + if (Boolean.TryParse(key.GetValue(c_settingBypassProxyOnLocal) as String, out boolValue)) + { + settings.BypassProxyOnLocal = boolValue; + } + + if (Boolean.TryParse(key.GetValue(c_settingEnableCompression) as String, out boolValue)) + { + settings.CompressionEnabled = boolValue; + } + + if (key.GetValue(c_settingsDefaultTimeout) != null && key.GetValueKind(c_settingsDefaultTimeout) == RegistryValueKind.DWord) + { + settings.SendTimeout = TimeSpan.FromMilliseconds(Math.Max(1, (Int32)key.GetValue(c_settingsDefaultTimeout))); + } + + if (key.GetValue(c_settingsAgentId) != null && key.GetValueKind(c_settingsAgentId) == RegistryValueKind.String) + { + settings.AgentId = (String)key.GetValue(c_settingsAgentId); + } + } + } + + String bypass = Environment.GetEnvironmentVariable("TFS_BYPASS_PROXY_ON_LOCAL"); + if (!String.IsNullOrEmpty(bypass)) + { + settings.BypassProxyOnLocal = String.Equals(bypass, "1", StringComparison.Ordinal); + } + } + catch (Exception e) + { + // If the current account doesn't have privileges to access the registry (e.g. TFS service account) + // ignore any registry access errors... 
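+                // Security/authorization failures are ignored silently; any other exception is also
+                // swallowed but traced below, so the defaults set up above remain usable.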
+ if (!(e is SecurityException || e is UnauthorizedAccessException)) + { + Trace.WriteLine("An exception was encountered and ignored while reading settings: " + e); + } + } +#endif + + settings.UserAgent = UserAgentUtility.GetDefaultRestUserAgent(); + +#if !NETSTANDARD + //default this to true on client\server connections + settings.ClientCertificateManager = VssClientCertificateManager.Instance; +#endif + return settings; + } + + private static Lazy s_defaultSettings + = new Lazy(ConstructDefaultSettings); + + private const String c_settingsKey = "Services\\RequestSettings"; + private const String c_settingBypassProxyOnLocal = "BypassProxyOnLocal"; + private const String c_settingEnableCompression = "EnableCompression"; + private const String c_settingsDefaultTimeout = "DefaultTimeout"; + private const String c_settingsAgentId = "AgentId"; + } +} diff --git a/src/Sdk/WebApi/WebApi/VssClientSettings.cs b/src/Sdk/WebApi/WebApi/VssClientSettings.cs new file mode 100644 index 00000000000..01d2097a252 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssClientSettings.cs @@ -0,0 +1,126 @@ +using System; +using System.IO; +using GitHub.Services.Common; +using GitHub.Services.Common.ClientStorage; +using GitHub.Services.WebApi.Internal; +using Microsoft.Win32; + +namespace GitHub.Services.WebApi +{ + /// + /// Helper for retrieving client settings which are environment-specific or retrieved from the Windows Registry + /// + internal static class VssClientSettings + { + /// + /// Directory containing the client cache files which resides below the settings directory. + /// + /// This will look something like this: + /// C:\Documents and Settings\username\Local Settings\Application Data\Microsoft\VisualStudio Services\[GeneratedVersionInfo.TfsProductVersion]\Cache + /// + internal static string ClientCacheDirectory + { + get + { + return Path.Combine(ClientSettingsDirectory, "Cache"); + } + } + + /// + /// Directory containing the client settings files. + /// + /// This will look something like this: + /// C:\Documents and Settings\username\Local Settings\Application Data\Microsoft\VisualStudio Services\[GeneratedVersionInfo.TfsProductVersion] + /// + internal static string ClientSettingsDirectory + { + get + { + // We purposely do not cache this value. This value needs to change if + // Windows Impersonation is being used. + return Path.Combine(VssFileStorage.ClientSettingsDirectory, GeneratedVersionInfo.TfsProductVersion); + } + } + +#if !NETSTANDARD + /// + /// Defines the expiration interval for the location service client disk cache. + /// + internal static int? ClientCacheTimeToLive + { + get + { + if (s_clientCacheTimeToLive == null && !s_checkedClientCacheTimeToLive) + { + // Check once per process lifetime, but don't keep checking the registry over and over + s_checkedClientCacheTimeToLive = true; + + // Prefer HKCU over HKLM. 
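+                    // Same HKCU-then-HKLM probing as the request settings, but against the
+                    // "Services\CacheSettings" key; a DWORD "ClientCacheTimeToLive" value, when
+                    // present, is read once per process and clamped to a minimum of 1.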
+                    RegistryKey key = null;
+
+                    // Default store: HKCU
+                    using (RegistryKey userRoot = VssClientEnvironment.TryGetUserRegistryRoot())
+                    {
+                        if (userRoot != null)
+                        {
+                            key = userRoot.OpenSubKey(c_cacheSettingsKey);
+                        }
+                    }
+
+                    // Alternate store: HKLM
+                    if (key == null)
+                    {
+                        using (RegistryKey applicationRoot = VssClientEnvironment.TryGetApplicationRegistryRoot())
+                        {
+                            if (applicationRoot != null)
+                            {
+                                key = applicationRoot.OpenSubKey(c_cacheSettingsKey);
+                            }
+                        }
+                    }
+
+                    if (key != null)
+                    {
+                        if (key.GetValue(c_settingClientCacheTimeToLive) != null && key.GetValueKind(c_settingClientCacheTimeToLive) == RegistryValueKind.DWord)
+                        {
+                            s_clientCacheTimeToLive = Math.Max(1, (int)key.GetValue(c_settingClientCacheTimeToLive));
+                        }
+                    }
+                }
+
+                return s_clientCacheTimeToLive;
+            }
+            set
+            {
+                // For testing purposes only
+                s_clientCacheTimeToLive = value;
+            }
+        }
+
+        /// <summary>
+        /// Gets Connect() options which are overridden in the user registry hive.
+        /// </summary>
+        internal static void GetConnectionOverrides(
+            out VssConnectMode? connectModeOverride,
+            out string userOverride)
+        {
+            connectModeOverride = null;
+            userOverride = VssClientEnvironment.GetSharedConnectedUserValue(VssConnectionParameterOverrideKeys.FederatedAuthenticationUser);
+
+            var modeOverride = VssClientEnvironment.GetSharedConnectedUserValue(VssConnectionParameterOverrideKeys.FederatedAuthenticationMode);
+
+            VssConnectMode modeOverrideValue = VssConnectMode.Automatic;
+
+            if (modeOverride != null && Enum.TryParse(modeOverride, out modeOverrideValue))
+            {
+                connectModeOverride = modeOverrideValue;
+            }
+        }
+
+        private static int? s_clientCacheTimeToLive;
+        private static bool s_checkedClientCacheTimeToLive;
+#endif
+        private const string c_cacheSettingsKey = "Services\\CacheSettings";
+        private const string c_settingClientCacheTimeToLive = "ClientCacheTimeToLive";
+    }
+}
diff --git a/src/Sdk/WebApi/WebApi/VssConnectMode.cs b/src/Sdk/WebApi/WebApi/VssConnectMode.cs
new file mode 100644
index 00000000000..07ae1b94e14
--- /dev/null
+++ b/src/Sdk/WebApi/WebApi/VssConnectMode.cs
@@ -0,0 +1,12 @@
+namespace GitHub.Services.WebApi
+{
+    public enum VssConnectMode
+    {
+        Automatic = 0,
+        Licensing = 1,
+        Profile = 2,
+        Secure = 3,
+        User = 4,
+        Resource = 5
+    }
+}
diff --git a/src/Sdk/WebApi/WebApi/VssConnection.cs b/src/Sdk/WebApi/WebApi/VssConnection.cs
new file mode 100644
index 00000000000..ee7d6aa50e6
--- /dev/null
+++ b/src/Sdk/WebApi/WebApi/VssConnection.cs
@@ -0,0 +1,905 @@
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Diagnostics;
+using System.Diagnostics.CodeAnalysis;
+using System.Linq;
+using System.Net.Http;
+using System.Reflection;
+using System.Threading;
+using System.Threading.Tasks;
+using GitHub.Services.Common;
+using GitHub.Services.Location;
+using GitHub.Services.WebApi.Internal;
+using GitHub.Services.WebApi.Location;
+using GitHub.Services.WebApi.Utilities;
+
+namespace GitHub.Services.WebApi
+{
+    public class VssConnection : IDisposable
+    {
+        public VssConnection(
+            Uri baseUrl,
+            VssCredentials credentials)
+            : this(baseUrl, credentials, VssClientHttpRequestSettings.Default.Clone())
+        {
+        }
+
+        public VssConnection(
+            Uri baseUrl,
+            VssCredentials credentials,
+            VssHttpRequestSettings settings)
+            : this(baseUrl, new VssHttpMessageHandler(credentials, settings), null)
+        {
+        }
+
+        public VssConnection(
+            Uri baseUrl,
+            VssHttpMessageHandler innerHandler,
+            IEnumerable<DelegatingHandler> delegatingHandlers)
+            : this(baseUrl, innerHandler, delegatingHandlers, true)
+        {
+        }
+
+        private VssConnection(
+            Uri baseUrl,
+            VssHttpMessageHandler innerHandler,
+            IEnumerable<DelegatingHandler> delegatingHandlers,
+            Boolean allowUnattributedClients)
+        {
+            ArgumentUtility.CheckForNull(baseUrl, "baseUrl");
+            ArgumentUtility.CheckForNull(innerHandler, "innerHandler");
+
+            // Permit delegatingHandlers to be null
+            m_delegatingHandlers = delegatingHandlers = delegatingHandlers ?? Enumerable.Empty<DelegatingHandler>();
+
+            m_baseUrl = baseUrl;
+            m_innerHandler = innerHandler;
+            m_allowUnattributedClients = allowUnattributedClients;
+
+            // Do we need to add a retry handler to the pipeline? If so, it needs to come last.
+            if (this.Settings.MaxRetryRequest > 0)
+            {
+                delegatingHandlers = delegatingHandlers.Concat(new DelegatingHandler[] { new VssHttpRetryMessageHandler(this.Settings.MaxRetryRequest) });
+            }
+
+            // Create and persist the pipeline.
+            if (delegatingHandlers.Any())
+            {
+                m_pipeline = HttpClientFactory.CreatePipeline(m_innerHandler, delegatingHandlers);
+            }
+            else
+            {
+                m_pipeline = m_innerHandler;
+            }
+
+            m_serverDataProvider = new VssServerDataProvider(this, m_pipeline, m_baseUrl.AbsoluteUri);
+
+            if (innerHandler.Credentials != null)
+            {
+                // store base url on credentials, as it is required when creating a token storage key.
+                if (innerHandler.Credentials.Federated != null)
+                {
+                    innerHandler.Credentials.Federated.TokenStorageUrl = baseUrl;
+                }
+                if (innerHandler.Credentials.Windows != null)
+                {
+                    innerHandler.Credentials.Windows.TokenStorageUrl = baseUrl;
+                }
+            }
+        }
+
+        /// <summary>
+        ///
+        /// </summary>
+        public Task ConnectAsync(
+            CancellationToken cancellationToken = default(CancellationToken))
+        {
+            return ConnectAsync(VssConnectMode.Automatic, null, cancellationToken);
+        }
+
+        public Task ConnectAsync(
+            VssConnectMode connectMode,
+            CancellationToken cancellationToken = default(CancellationToken))
+        {
+            return ConnectAsync(connectMode, null, cancellationToken);
+        }
+
+        public Task ConnectAsync(
+            VssConnectMode connectMode,
+            IDictionary<String, String> parameters,
+            CancellationToken cancellationToken = default(CancellationToken))
+        {
+            CheckForDisposed();
+            // Set the connectMode on the credential's FederatedPrompt
+            if (Credentials.Federated != null && Credentials.Federated.Prompt != null)
+            {
+                if (parameters != null)
+                {
+                    // Create a copy of the parameters if any were supplied.
+                    parameters = new Dictionary<String, String>(parameters);
+                }
+                else
+                {
+                    parameters = new Dictionary<String, String>();
+                }
+
+                IVssCredentialPrompt promptToSetParametersOn;
+
+                // prompt can be a VssCredentialPrompts with a VssFederatedCredentialPrompt inside it
+                IVssCredentialPrompts credentialPrompts = Credentials.Federated.Prompt as IVssCredentialPrompts;
+                if (credentialPrompts != null && credentialPrompts.FederatedPrompt != null)
+                {
+                    // If the IVssCredentialPrompts contains an inner FederatedPrompt, set the parameters on the inner one
+                    promptToSetParametersOn = credentialPrompts.FederatedPrompt;
+                }
+                else
+                {
+                    promptToSetParametersOn = Credentials.Federated.Prompt;
+                }
+
+#if DEBUG && !NETSTANDARD
+                VssConnectMode?
connectModeOverride; + String userOverride; + VssClientSettings.GetConnectionOverrides(out connectModeOverride, out userOverride); + + if (connectModeOverride != null) + { + connectMode = connectModeOverride.Value; + } + if (userOverride != null) + { + parameters[VssConnectionParameterKeys.User] = userOverride; + } +#endif + parameters[VssConnectionParameterKeys.VssConnectionMode] = connectMode.ToString(); + promptToSetParametersOn.Parameters = parameters; + } + + return ServerDataProvider.ConnectAsync(ConnectOptions.None, cancellationToken); + } + + /// + /// + /// + public void Disconnect() + { + try + { + if (HasAuthenticated) + { + m_innerHandler.Credentials.SignOut(Uri, null, null); + } + } + finally + { + ServerDataProvider.DisconnectAsync().SyncResult(); + } + } + + /// + /// + /// + /// + /// + public T GetService() where T : IVssClientService + { + return (T)GetClientServiceImplAsync(typeof(T), Guid.Empty, GetServiceInstanceAsync).SyncResult(); + } + + /// + /// + /// + /// + /// + public async Task GetServiceAsync(CancellationToken cancellationToken = default(CancellationToken)) where T : IVssClientService + { + return (T)await GetClientServiceImplAsync(typeof(T), Guid.Empty, GetServiceInstanceAsync, cancellationToken).ConfigureAwait(false); + } + + /// + /// Retrieves an HTTP client of the specified type. + /// + /// The type of client to retrieve + /// The client of the specified type + public T GetClient() where T : VssHttpClientBase + { + return GetClientAsync().SyncResult(); + } + + /// + /// Retrieves an HTTP client of the specified type. + /// + /// The type of client to retrieve + /// Optional parameter. If supplied, the identifier will be used to resolve the + /// base address for the HTTP client. Otherwise the base address will be resolved using the service identifier + /// in the metadata of the requested client type (i.e. the ResourceArea attribute) + /// The client of the specified type + public T GetClient(Guid serviceIdentifier) where T : VssHttpClientBase + { + return GetClientAsync(serviceIdentifier).SyncResult(); + } + + /// + /// Retrieves an HTTP client of the specified type. + /// + /// The type of client to retrieve + /// The client of the specified type + public T GetClient(CancellationToken cancellationToken) where T : VssHttpClientBase + { + return GetClientAsync(cancellationToken).SyncResult(); + } + + /// + /// Retrieves an HTTP client of the specified type. + /// + /// The type of client to retrieve + /// Optional parameter. If supplied, the identifier will be used to resolve the + /// base address for the HTTP client. Otherwise the base address will be resolved using the service identifier + /// in the metadata of the requested client type (i.e. the ResourceArea attribute) + /// The client of the specified type + public T GetClient(Guid serviceIdentifier, CancellationToken cancellationToken) where T : VssHttpClientBase + { + return GetClientAsync(serviceIdentifier, cancellationToken).SyncResult(); + } + + /// + /// Retrieves an HTTP client of the specified type. 
+ /// + /// The type of client to retrieve + /// The client of the specified type + public async Task GetClientAsync(CancellationToken cancellationToken = default(CancellationToken)) where T : VssHttpClientBase + { + CheckForDisposed(); + Type clientType = typeof(T); + Guid serviceIdentifier = GetServiceIdentifier(clientType); + + if (serviceIdentifier == Guid.Empty && !m_allowUnattributedClients) + { + throw new CannotGetUnattributedClientException(clientType); + } + + return (T)await GetClientServiceImplAsync(typeof(T), serviceIdentifier, GetClientInstanceAsync, cancellationToken).ConfigureAwait(false); + } + + /// + /// Retrieves an HTTP client of the specified type. + /// + /// The type of client to retrieve + /// Optional parameter. If supplied, the identifier will be used to resolve the + /// base address for the HTTP client. Otherwise the base address will be resolved using the service identifier + /// in the metadata of the requested client type (i.e. the ResourceArea attribute) + /// The client of the specified type + public async Task GetClientAsync(Guid serviceIdentifier, CancellationToken cancellationToken = default(CancellationToken)) where T : VssHttpClientBase + { + return (T)await GetClientServiceImplAsync(typeof(T), serviceIdentifier, GetClientInstanceAsync, cancellationToken).ConfigureAwait(false); + } + + /// + /// + /// + /// + /// + public Object GetClient(Type clientType) + { + // Verify incoming type is assignable from VssHttpClientBase + Type requiredBaseType = typeof(VssHttpClientBase); + + if (!requiredBaseType.GetTypeInfo().IsAssignableFrom(clientType.GetTypeInfo())) + { + throw new ArgumentException(requiredBaseType.FullName); + } + + // Return client instance + return GetClientServiceImplAsync(clientType, GetServiceIdentifier(clientType), GetClientInstanceAsync).SyncResult(); + } + + /// + /// + /// + /// + /// + /// + private async Task GetClientServiceImplAsync( + Type requestedType, + Guid serviceIdentifier, + Func> getInstanceAsync, + CancellationToken cancellationToken = default(CancellationToken)) + { + CheckForDisposed(); + Object requestedObject = null; + + // Get the actual type to lookup or instantiate, which will either be requestedType itself + // or an extensible type if one was registered + Type managedType = GetExtensibleType(requestedType); + + ClientCacheKey cacheKey = new ClientCacheKey(managedType, serviceIdentifier); + + // First check if we have this type already constructed + if (!m_cachedTypes.TryGetValue(cacheKey, out requestedObject)) + { + AsyncLock typeLock = m_loadingTypes.GetOrAdd(cacheKey, (t) => new AsyncLock()); + + // This ensures only a single thread at a time will be performing the work to initialize this particular type + // The other threads will go async awaiting the lock task. This is still an improvement over the old synchronous locking, + // as this thread won't be blocked (like a Monitor.Enter), but can return a task to the caller so that the thread + // can continue to be used to do useful work while the result is being worked on. + // We are trusting that getInstanceAsync does not have any code paths that lead back here (for the same type), otherwise we can deadlock on ourselves. + // The old code also extended the same trust which (if violated) would've resulted in a StackOverflowException, + // but with async tasks it will lead to a deadlock. 
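+            // The per-key lock entry is removed from m_loadingTypes once the instance has been cached,
+            // so the dictionary only tracks types that are still being constructed.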
+ using (await typeLock.LockAsync(cancellationToken).ConfigureAwait(false)) + { + if (!m_cachedTypes.TryGetValue(cacheKey, out requestedObject)) + { + requestedObject = await getInstanceAsync(managedType, serviceIdentifier, cancellationToken).ConfigureAwait(false); + m_cachedTypes[cacheKey] = requestedObject; + + AsyncLock removed; + m_loadingTypes.TryRemove(cacheKey, out removed); + } + } + } + + return requestedObject; + } + + /// + /// + /// + /// + /// + /// + private Task GetClientInstanceAsync( + Type managedType, + Guid serviceIdentifier, + CancellationToken cancellationToken) + { + return GetClientInstanceAsync(managedType, serviceIdentifier, cancellationToken, null, null); + } + + /// + /// + /// + /// + /// + /// + /// + /// + [EditorBrowsable(EditorBrowsableState.Never)] + internal Task GetClientInstanceAsync( + Type managedType, + CancellationToken cancellationToken, + VssHttpRequestSettings settings, + DelegatingHandler[] handlers) + { + return GetClientInstanceAsync(managedType, GetServiceIdentifier(managedType), cancellationToken, settings, handlers); + } + + /// + /// + /// + /// + /// + private async Task GetClientInstanceAsync( + Type managedType, + Guid serviceIdentifier, + CancellationToken cancellationToken, + VssHttpRequestSettings settings, + DelegatingHandler[] handlers) + { + CheckForDisposed(); + ILocationService locationService = await GetServiceAsync(cancellationToken).ConfigureAwait(false); + ILocationDataProvider locationData = await locationService.GetLocationDataAsync(serviceIdentifier, cancellationToken).ConfigureAwait(false); + + if (locationData == null) + { + throw new VssServiceException(WebApiResources.ServerDataProviderNotFound(serviceIdentifier)); + } + + String serviceLocationString = await locationData.LocationForCurrentConnectionAsync( + ServiceInterfaces.LocationService2, + LocationServiceConstants.SelfReferenceIdentifier, + cancellationToken).ConfigureAwait(false); + + // This won't ever be null because of compat code in ServerDataProvider + Uri clientBaseUri = new Uri(serviceLocationString); + + VssHttpClientBase toReturn = null; + + if (settings != null) + { + toReturn = (VssHttpClientBase)Activator.CreateInstance(managedType, clientBaseUri, Credentials, settings, handlers); + } + else + { + toReturn = (VssHttpClientBase)Activator.CreateInstance(managedType, clientBaseUri, m_pipeline, false /* disposeHandler */); + } + + ApiResourceLocationCollection resourceLocations = await locationData.GetResourceLocationsAsync(cancellationToken).ConfigureAwait(false); + toReturn.SetResourceLocations(resourceLocations); + + return toReturn; + } + + /// + /// Gets the service and fallback identifiers from the [ResourceArea] attribute of the specified type + /// + private Guid GetServiceIdentifier( + Type requestedType) + { + ResourceAreaAttribute[] attributes = (ResourceAreaAttribute[])requestedType.GetTypeInfo().GetCustomAttributes(true); + + if (attributes.Length > 0) + { + return attributes[0].AreaId; + } + else + { + return Guid.Empty; + } + } + + /// + /// + /// + /// + /// + /// + private Task GetServiceInstanceAsync( + Type managedType, + Guid serviceIdentifier, + CancellationToken cancellationToken) + { + CheckForDisposed(); + IVssClientService clientService; + + try + { + // Create our instance of the managed service object. 
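+                // Activator.CreateInstance needs a public parameterless constructor on the service type;
+                // when it is missing, the MissingMemberException below is surfaced as an ArgumentException.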
+ clientService = (IVssClientService)Activator.CreateInstance(managedType); + } + catch (MissingMemberException ex) + { + throw new ArgumentException(WebApiResources.GetServiceArgumentError(managedType), ex); + } + + + // We successfully created an object, initialize him and finally set the + // return value. + clientService.Initialize(this); + + return Task.FromResult(clientService); + } + + /// + /// + /// + /// + /// + private Type GetExtensibleType(Type managedType) + { + if (managedType.GetTypeInfo().IsAbstract || managedType.GetTypeInfo().IsInterface) + { + Type extensibleType = null; + + // We can add extensible type registration for the client later (app.config? windows registry?). For now it is based solely on the attribute + if (!m_extensibleServiceTypes.TryGetValue(managedType.Name, out extensibleType)) + { + VssClientServiceImplementationAttribute[] attributes = (VssClientServiceImplementationAttribute[])managedType.GetTypeInfo().GetCustomAttributes(true); + if (attributes.Length > 0) + { + if (attributes[0].Type != null) + { + extensibleType = attributes[0].Type; + m_extensibleServiceTypes[managedType.Name] = extensibleType; + } + else if (!String.IsNullOrEmpty(attributes[0].TypeName)) + { + extensibleType = Type.GetType(attributes[0].TypeName); + + if (extensibleType != null) + { + m_extensibleServiceTypes[managedType.Name] = extensibleType; + } + else + { + Debug.Assert(false, "VssConnection: Could not load type from type name: " + attributes[0].TypeName); + } + } + } + } + + if (extensibleType == null) + { + throw new ExtensibleServiceTypeNotRegisteredException(managedType); + } + + if (!managedType.GetTypeInfo().IsAssignableFrom(extensibleType.GetTypeInfo())) + { + throw new ExtensibleServiceTypeNotValidException(managedType, extensibleType); + } + + return extensibleType; + } + else + { + return managedType; + } + } + + /// + /// Used for Testing Only + /// + /// + /// + internal void RegisterExtensibleType( + String typeName, + Type type) + { + ArgumentUtility.CheckStringForNullOrEmpty(typeName, "typeName"); + ArgumentUtility.CheckForNull(type, "type"); + + m_extensibleServiceTypes[typeName] = type; + } + + /// + /// Used for Testing Only + /// + /// + /// + internal void RegisterClientServiceInstance( + Type type, + Object instance) + { + ArgumentUtility.CheckForNull(type, "type"); + ArgumentUtility.CheckForNull(instance, "instance"); + CheckForDisposed(); + + if (!type.GetTypeInfo().IsAssignableFrom(instance.GetType().GetTypeInfo())) + { + // This is just a test method -- no need to resource the string + throw new ArgumentException("Object is not an instance of the specified type."); + } + + Type instanceType = instance.GetType(); + ClientCacheKey cacheKey = new ClientCacheKey(instanceType, GetServiceIdentifier(type)); + + // Now add the service to the service list. 
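+            // Registering the type mapping and priming the instance cache ensures later
+            // GetService/GetClient lookups for this type return the supplied test instance.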
+ RegisterExtensibleType(type.Name, instanceType); + m_cachedTypes[cacheKey] = instance; + } + + private bool m_isDisposed = false; + private object m_disposeLock = new object(); + + public void Dispose() + { + if (!m_isDisposed) + { + lock (m_disposeLock) + { + if (!m_isDisposed) + { + m_isDisposed = true; + foreach (var cachedType in m_cachedTypes.Values.Where(v => v is IDisposable).Select(v => v as IDisposable)) + { + cachedType.Dispose(); + } + m_cachedTypes.Clear(); + Disconnect(); + if (m_parentConnection != null) + { + m_parentConnection.Dispose(); + m_parentConnection = null; + } + } + } + } + } + + private void CheckForDisposed() + { + if (m_isDisposed) + { + throw new ObjectDisposedException(this.GetType().Name); + } + } + + /// + /// + /// + public Uri Uri + { + get + { + return m_baseUrl; + } + } + + /// + /// + /// + public VssHttpMessageHandler InnerHandler + { + get + { + return m_innerHandler; + } + } + + /// + /// + /// + public IEnumerable DelegatingHandlers + { + get + { + return m_delegatingHandlers; + } + } + + /// + /// + /// + public VssCredentials Credentials + { + get + { + return m_innerHandler.Credentials; + } + } + + /// + /// + /// + public VssClientHttpRequestSettings Settings + { + get + { + return (VssClientHttpRequestSettings)m_innerHandler.Settings; + } + } + + /// + /// The Guid that identifies the server associated with the VssConnection. + /// + public Guid ServerId + { + get + { + return ServerDataProvider.GetInstanceIdAsync().SyncResult(); + } + } + + /// + /// The Guid that identifies the type of server associated with the VssConnection + /// + public Guid ServerType + { + get + { + return ServerDataProvider.GetInstanceTypeAsync().SyncResult(); + } + } + + /// + /// The Id of the identity who the calls to the server are being made for. + /// + public Identity.Identity AuthorizedIdentity + { + get + { + return ServerDataProvider.GetAuthorizedIdentityAsync().SyncResult(); + } + } + + /// + /// + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public Identity.Identity AuthenticatedIdentity + { + get + { + return ServerDataProvider.GetAuthenticatedIdentityAsync().SyncResult(); + } + } + + /// + /// + /// + public Boolean HasAuthenticated + { + get + { + return ServerDataProvider.HasConnected; + } + } + + /// + /// The connection to the parent host for this VSS connection. If this connection is to a collection host, + /// then this property will return a connection to the account/tenant host. + /// The property will return null if a parent cannot be located for the current connection. + /// + public VssConnection ParentConnection + { + get + { + CheckForDisposed(); + if (m_parentConnection == null) + { + lock (m_parentConnectionLock) + { + ILocationService locationService = GetService(); + ILocationDataProvider locationData = locationService.GetLocationData(Guid.Empty); + + String applicationLocation = locationData.LocationForCurrentConnection( + ServiceInterfaces.LocationService2, + LocationServiceConstants.ApplicationIdentifier); + + if (String.IsNullOrEmpty(applicationLocation)) + { + throw new VssServiceException(WebApiResources.ServerDataProviderNotFound(LocationServiceConstants.ApplicationIdentifier)); + } + + m_parentConnection = new VssConnection( + new Uri(applicationLocation), + new VssHttpMessageHandler(Credentials, VssClientHttpRequestSettings.Default.Clone()), + null, + allowUnattributedClients: false); + } + } + + return m_parentConnection; + } + } + + /// + /// Used for testing. Do not use for product code. 
+ /// + internal IVssServerDataProvider ServerDataProvider + { + get + { + return m_serverDataProvider; + } + set + { + // Used for testing + m_serverDataProvider = value; + } + } + + private IVssServerDataProvider m_serverDataProvider; + private VssConnection m_parentConnection; + private Object m_parentConnectionLock = new Object(); + + private readonly Uri m_baseUrl; + private readonly HttpMessageHandler m_pipeline; + private readonly VssHttpMessageHandler m_innerHandler; + private readonly IEnumerable m_delegatingHandlers; + private readonly Boolean m_allowUnattributedClients; + + private readonly ConcurrentDictionary m_loadingTypes = new ConcurrentDictionary(ClientCacheKey.Comparer); + private readonly ConcurrentDictionary m_cachedTypes = new ConcurrentDictionary(ClientCacheKey.Comparer); + private readonly ConcurrentDictionary m_extensibleServiceTypes = new ConcurrentDictionary(); + + private struct ClientCacheKey + { + public ClientCacheKey(Type type, Guid serviceIdentifier) + { + this.Type = type; + this.ServiceIdentifier = serviceIdentifier; + } + + public readonly Type Type; + public readonly Guid ServiceIdentifier; + + public static readonly IEqualityComparer Comparer = new ClientCacheKeyComparer(); + + private class ClientCacheKeyComparer : IEqualityComparer + { + public bool Equals(ClientCacheKey x, ClientCacheKey y) + { + return x.Type.Equals(y.Type) && + x.ServiceIdentifier.Equals(y.ServiceIdentifier); + } + + public int GetHashCode(ClientCacheKey obj) + { + return obj.Type.GetHashCode() ^ obj.ServiceIdentifier.GetHashCode(); + } + } + } + } + + /// + /// + /// + public interface IVssClientService + { + /// + /// + /// + /// + void Initialize(VssConnection connection); + } + + /// + /// + /// + [AttributeUsage(AttributeTargets.Class | AttributeTargets.Interface, AllowMultiple = false)] + [SuppressMessage("Microsoft.Design", "CA1019:DefineAccessorsForAttributeArguments", Justification = "FxCop can't tell that we have an accessor.")] + public sealed class VssClientServiceImplementationAttribute : Attribute + { + public VssClientServiceImplementationAttribute(Type type) + { + this.Type = type; + } + + public VssClientServiceImplementationAttribute(String typeName) + { + this.TypeName = typeName; + } + + public Type Type + { + get; + set; + } + + public String TypeName + { + get; + set; + } + } + + /// + /// + /// + [ExceptionMapping("0.0", "3.0", "ExtensibleServiceTypeNotRegisteredException", "GitHub.Services.Client.ExtensibleServiceTypeNotRegisteredException, GitHub.Services.Client, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ExtensibleServiceTypeNotRegisteredException : VssException + { + public ExtensibleServiceTypeNotRegisteredException(Type managedType) + : base(WebApiResources.ExtensibleServiceTypeNotRegistered(managedType.Name)) + { + } + + public ExtensibleServiceTypeNotRegisteredException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + /// + /// + /// + [ExceptionMapping("0.0", "3.0", "ExtensibleServiceTypeNotValidException", "GitHub.Services.Client.ExtensibleServiceTypeNotValidException, GitHub.Services.Client, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class ExtensibleServiceTypeNotValidException : VssException + { + public ExtensibleServiceTypeNotValidException(Type managedType, Type extensibleType) + : base(WebApiResources.ExtensibleServiceTypeNotValid(managedType.Name, extensibleType.Name)) + { + } + + public 
ExtensibleServiceTypeNotValidException(String message, Exception innerException) + : base(message, innerException) + { + } + } + + public class CannotGetUnattributedClientException : VssException + { + public CannotGetUnattributedClientException(Type clientType) + : base(WebApiResources.CannotGetUnattributedClient(clientType.Name)) + { + } + + public CannotGetUnattributedClientException(String message, Exception innerException) + : base(message, innerException) + { + } + } +} diff --git a/src/Sdk/WebApi/WebApi/VssConnectionParameterKeys.cs b/src/Sdk/WebApi/WebApi/VssConnectionParameterKeys.cs new file mode 100644 index 00000000000..11969bec2cd --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssConnectionParameterKeys.cs @@ -0,0 +1,25 @@ +using System.ComponentModel; + +namespace GitHub.Services.WebApi.Internal +{ + [EditorBrowsable(EditorBrowsableState.Never)] + public static class VssConnectionParameterKeys + { + public const string User = "user"; + public const string AccessToken = "accessToken"; + public const string VssConnectionMode = "vssConnectionMode"; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static class VssConnectionParameterOverrideKeys + { + public const string AadInstance = "AadInstance"; + public const string AadNativeClientIdentifier = "AadClientIdentifier"; + public const string AadNativeClientRedirect = "AadNativeClientRedirect"; + public const string AadApplicationTenant = "AadApplicationTenant"; + public const string ConnectedUserRoot = "ConnectedUser"; + public const string FederatedAuthenticationMode = "FederatedAuthenticationMode"; + public const string FederatedAuthenticationUser = "FederatedAuthenticationUser"; + public const string UseAadWindowsIntegrated = "UseAadWindowsIntegrated"; + } +} diff --git a/src/Sdk/WebApi/WebApi/VssEventId.cs b/src/Sdk/WebApi/WebApi/VssEventId.cs new file mode 100644 index 00000000000..0c5fd4236d6 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssEventId.cs @@ -0,0 +1,21 @@ +using System; + +namespace GitHub.Services.WebApi +{ + /// Define event log id ranges + /// This corresponds with values in Framework\Server\Common\EventLog.cs + public static class VssEventId + { + public static readonly int DefaultEventId = 0; + + // Errors + public static readonly int ExceptionBaseEventId = 3000; + + private static readonly int EtmBaseEventId = ExceptionBaseEventId + 1200; // 4200 + public static readonly int VssIdentityServiceException = EtmBaseEventId + 7; + public static readonly int AccountException = EtmBaseEventId + 36; + + //File Container Service range + public static readonly int FileContainerBaseEventId = ExceptionBaseEventId + 1700; // 4700 + } +} diff --git a/src/Sdk/WebApi/WebApi/VssHttpClientBase.cs b/src/Sdk/WebApi/WebApi/VssHttpClientBase.cs new file mode 100644 index 00000000000..d0fffda2bf7 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssHttpClientBase.cs @@ -0,0 +1,1318 @@ +//----------------------------------------------------------------------- +// +// Copyright (C) 2009-2014 All Rights Reserved +// +//----------------------------------------------------------------------- + +using System; +using System.Collections; +using System.Collections.Generic; +using System.ComponentModel; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Formatting; +using System.Net.Http.Headers; +using System.Reflection; +using System.Threading; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.Common.Diagnostics; +using 
GitHub.Services.Common.Internal; +using GitHub.Services.WebApi.Utilities.Internal; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.Services.WebApi +{ + /// + /// This class is used as the base class for all the REST client classes. + /// It wraps a System.Net.Http.HttpClient and sets up standard defaults. + /// + public abstract class VssHttpClientBase : IDisposable + { + protected VssHttpClientBase( + Uri baseUrl, + VssCredentials credentials) + : this(baseUrl, credentials, settings: null) + { + } + + protected VssHttpClientBase( + Uri baseUrl, + VssCredentials credentials, + VssHttpRequestSettings settings) + : this(baseUrl, credentials, settings: settings, handlers: null) + { + } + + protected VssHttpClientBase( + Uri baseUrl, + VssCredentials credentials, + params DelegatingHandler[] handlers) + : this(baseUrl, credentials, null, handlers) + { + } + + protected VssHttpClientBase( + Uri baseUrl, + VssCredentials credentials, + VssHttpRequestSettings settings, + params DelegatingHandler[] handlers) + : this(baseUrl, BuildHandler(credentials, settings, handlers), disposeHandler: true) + { + } + + protected VssHttpClientBase( + Uri baseUrl, + HttpMessageHandler pipeline, + bool disposeHandler) + { + m_client = new HttpClient(pipeline, disposeHandler); + + // Disable their timeout since we handle it ourselves + m_client.Timeout = TimeSpan.FromMilliseconds(-1.0); + m_client.BaseAddress = baseUrl; + m_formatter = new VssJsonMediaTypeFormatter(); + + SetServicePointOptions(); + + SetTokenStorageUrlIfNeeded(pipeline); + } + + private void SetTokenStorageUrlIfNeeded(HttpMessageHandler handler) + { + // The TokenStorageUrl should be set by the VssConnection, so that the same + // url is used for the token storage key regardless of the service this client + // talks to. If the VssHttpClient is created directly, then the best we can do + // is to set the storage url to match the base url of the client. + if (handler is VssHttpMessageHandler vssHttpMessageHandler) + { + if (vssHttpMessageHandler.Credentials != null) + { + if (vssHttpMessageHandler.Credentials.Federated != null + && vssHttpMessageHandler.Credentials.Federated.TokenStorageUrl == null) + { + vssHttpMessageHandler.Credentials.Federated.TokenStorageUrl = m_client.BaseAddress; + } + if (vssHttpMessageHandler.Credentials.Windows != null + && vssHttpMessageHandler.Credentials.Windows.TokenStorageUrl == null) + { + vssHttpMessageHandler.Credentials.Windows.TokenStorageUrl = m_client.BaseAddress; + } + } + } + else if (handler is DelegatingHandler delegatingHandler) + { + SetTokenStorageUrlIfNeeded(delegatingHandler.InnerHandler); + } + } + + private static HttpMessageHandler BuildHandler(VssCredentials credentials, VssHttpRequestSettings settings, DelegatingHandler[] handlers) + { + VssHttpMessageHandler innerHandler = new VssHttpMessageHandler(credentials, settings ?? new VssHttpRequestSettings()); + + if (null == handlers || + 0 == handlers.Length) + { + return innerHandler; + } + + return HttpClientFactory.CreatePipeline(innerHandler, handlers); + } + + /// + /// The base address. + /// + public Uri BaseAddress + { + get + { + return m_client.BaseAddress; + } + } + + /// + /// + /// + public VssResponseContext LastResponseContext + { + get { return m_LastResponseContext; } + } + + /// + /// The inner client. + /// + /// + /// Note to implementers: You should not update or expose the inner client + /// unless you instantiate your own instance of this class. 
Getting + /// an instance of this class from method such as GetClient<T> + /// a cached and shared instance. + /// + protected HttpClient Client + { + get + { + return m_client; + } + } + + /// + /// The media type formatter. + /// + /// + /// Note to implementers: You should not update or expose the media type formatter + /// unless you instantiate your own instance of this class. Getting + /// an instance of this class from method such as GetClient<T> + /// a cached and shared instance. + /// + protected MediaTypeFormatter Formatter + { + get + { + return m_formatter; + } + } + + /// + /// + /// + protected virtual IDictionary TranslatedExceptions + { + get + { + return null; + } + } + + protected HttpResponseMessage Send( + HttpRequestMessage message, + Object userState = null) + { + try + { + var response = SendAsync(message, userState); + + return response.Result; + } + catch (AggregateException ag) + { + ag = ag.Flatten(); + if (ag.InnerExceptions.Count == 1) + { + throw ag.InnerExceptions[0]; + } + throw; + } + } + + protected Task DeleteAsync( + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return this.SendAsync( + /*method:*/ HttpMethod.Delete, + locationId, + routeValues, + version, + /*content:*/ null, + queryParameters, + userState, + cancellationToken); + } + + protected Task GetAsync( + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return this.SendAsync( + /*method:*/ HttpMethod.Get, + locationId, + routeValues, + version, + /*content:*/ null, + queryParameters, + userState, + cancellationToken); + } + + protected Task GetAsync( + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return this.SendAsync( + /*method:*/ HttpMethod.Get, + locationId, + routeValues, + version, + /*content:*/ null, + queryParameters, + userState, + cancellationToken); + } + + protected Task PatchAsync( + T value, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return this.SendAsync( + /*method:*/ s_patchMethod.Value, + locationId, + routeValues, + version, + /*content:*/ new ObjectContent(value, m_formatter), + queryParameters, + userState, + cancellationToken); + } + + protected Task PatchAsync( + T value, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return this.SendAsync( + /*method:*/ s_patchMethod.Value, + locationId, + routeValues, + version, + /*content:*/ new ObjectContent(value, m_formatter), + queryParameters, + userState, + cancellationToken); + } + + protected Task PostAsync( + T value, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return 
this.SendAsync( + /*method:*/ HttpMethod.Post, + locationId, + routeValues, + version, + /*content:*/ new ObjectContent(value, m_formatter), + queryParameters, + userState, + cancellationToken); + } + + protected Task PostAsync( + T value, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return this.SendAsync( + /*method:*/ HttpMethod.Post, + locationId, + routeValues, + version, + /*content:*/ new ObjectContent(value, m_formatter), + queryParameters, + userState, + cancellationToken); + } + + protected Task PutAsync( + T value, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return this.SendAsync( + /*method:*/ HttpMethod.Put, + locationId, + routeValues, + version, + /*content:*/ new ObjectContent(value, m_formatter), + queryParameters, + userState, + cancellationToken); + } + + protected Task PutAsync( + T value, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return this.SendAsync( + /*method:*/ HttpMethod.Put, + locationId, + routeValues, + version, + /*content:*/ new ObjectContent(value, m_formatter), + queryParameters, + userState, + cancellationToken); + } + + protected Task SendAsync( + HttpMethod method, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + HttpContent content = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + return SendAsync(method, null, locationId, routeValues, version, content, queryParameters, userState, cancellationToken); + } + + protected async Task SendAsync( + HttpMethod method, + IEnumerable> additionalHeaders, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + HttpContent content = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + using (VssTraceActivity.GetOrCreate().EnterCorrelationScope()) + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync(method, additionalHeaders, locationId, routeValues, version, content, queryParameters, userState, cancellationToken).ConfigureAwait(false)) + { + return await SendAsync(requestMessage, userState, cancellationToken).ConfigureAwait(false); + } + } + + protected async Task SendAsync( + HttpMethod method, + IEnumerable> additionalHeaders, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + HttpContent content = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + using (VssTraceActivity.GetOrCreate().EnterCorrelationScope()) + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync(method, additionalHeaders, locationId, routeValues, version, content, queryParameters, userState, cancellationToken).ConfigureAwait(false)) + { + return await SendAsync(requestMessage, userState, cancellationToken).ConfigureAwait(false); + } + } + + protected HttpResponseMessage Send( + 
HttpMethod method, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + HttpContent content = null, + IEnumerable> queryParameters = null, + Object userState = null) + { + using (VssTraceActivity.GetOrCreate().EnterCorrelationScope()) + using (HttpRequestMessage requestMessage = CreateRequestMessageAsync(method, locationId, routeValues, version, content, queryParameters, userState, CancellationToken.None).SyncResult()) + { + return Send(requestMessage, userState); + } + } + + protected async Task SendAsync( + HttpMethod method, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + HttpContent content = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + using (VssTraceActivity.GetOrCreate().EnterCorrelationScope()) + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync(method, locationId, routeValues, version, content, queryParameters, userState, cancellationToken).ConfigureAwait(false)) + { + return await SendAsync(requestMessage, userState, cancellationToken).ConfigureAwait(false); + } + } + + protected async Task SendAsync( + HttpMethod method, + Guid locationId, + HttpCompletionOption completionOption, + Object routeValues = null, + ApiResourceVersion version = null, + HttpContent content = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + using (VssTraceActivity.GetOrCreate().EnterCorrelationScope()) + using (HttpRequestMessage requestMessage = await CreateRequestMessageAsync(method, locationId, routeValues, version, content, queryParameters, userState, cancellationToken).ConfigureAwait(false)) + { + return await SendAsync(requestMessage, completionOption, userState, cancellationToken).ConfigureAwait(false); + } + } + + /// + /// Create an HTTP request message for the given location, replacing parameters in the location's route template + /// with values in the supplied routeValues dictionary. + /// + /// HTTP verb to use + /// Id of the location to use + /// Values to use to replace parameters in the location's route template + /// Version to send in the request or null to use the VSS latest API version + /// The mediatype to set in request header. + /// HttpRequestMessage + protected Task CreateRequestMessageAsync( + HttpMethod method, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + HttpContent content = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken), + String mediaType = c_jsonMediaType) + { + return CreateRequestMessageAsync(method, null, locationId, routeValues, version, content, queryParameters, userState, cancellationToken, mediaType); + } + + /// + /// Create an HTTP request message for the given location, replacing parameters in the location's route template + /// with values in the supplied routeValues dictionary. + /// + /// HTTP verb to use + /// Id of the location to use + /// Values to use to replace parameters in the location's route template + /// Version to send in the request or null to use the VSS latest API version + /// The mediatype to set in request header. 
+ /// HttpRequestMessage + protected virtual async Task CreateRequestMessageAsync( + HttpMethod method, + IEnumerable> additionalHeaders, + Guid locationId, + Object routeValues = null, + ApiResourceVersion version = null, + HttpContent content = null, + IEnumerable> queryParameters = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken), + String mediaType = c_jsonMediaType) + { + // Lookup the location + ApiResourceLocation location = await GetResourceLocationAsync(locationId, userState, cancellationToken).ConfigureAwait(false); + if (location == null) + { + throw new VssResourceNotFoundException(locationId, BaseAddress); + } + + return CreateRequestMessage(method, additionalHeaders, location, routeValues, version, content, queryParameters, mediaType); + } + + /// + /// Create an HTTP request message for the given location, replacing parameters in the location's route template + /// with values in the supplied routeValues dictionary. + /// + /// HTTP verb to use + /// API resource location + /// Values to use to replace parameters in the location's route template + /// Version to send in the request or null to use the VSS latest API version + /// The mediatype to set in request header. + /// HttpRequestMessage + protected HttpRequestMessage CreateRequestMessage( + HttpMethod method, + ApiResourceLocation location, + Object routeValues = null, + ApiResourceVersion version = null, + HttpContent content = null, + IEnumerable> queryParameters = null, + String mediaType = c_jsonMediaType) + { + return CreateRequestMessage(method, null, location, routeValues, version, content, queryParameters, mediaType); + } + + /// + /// Create an HTTP request message for the given location, replacing parameters in the location's route template + /// with values in the supplied routeValues dictionary. + /// + /// HTTP verb to use + /// API resource location + /// Values to use to replace parameters in the location's route template + /// Version to send in the request or null to use the VSS latest API version + /// The mediatype to set in request header. 
+ /// HttpRequestMessage + protected HttpRequestMessage CreateRequestMessage( + HttpMethod method, + IEnumerable> additionalHeaders, + ApiResourceLocation location, + Object routeValues = null, + ApiResourceVersion version = null, + HttpContent content = null, + IEnumerable> queryParameters = null, + String mediaType = c_jsonMediaType) + { + CheckForDisposed(); + // Negotiate the request version to send + ApiResourceVersion requestVersion = NegotiateRequestVersion(location, version); + if (requestVersion == null) + { + throw new VssVersionNotSupportedException(location, version.ApiVersion, location.MinVersion, BaseAddress); + } + + // Construct the url + Dictionary valuesDictionary = VssHttpUriUtility.ToRouteDictionary(routeValues, location.Area, location.ResourceName); + + String locationRelativePath = VssHttpUriUtility.ReplaceRouteValues(location.RouteTemplate, valuesDictionary); + Uri locationUri = VssHttpUriUtility.ConcatUri(BaseAddress, locationRelativePath); + if (queryParameters != null && queryParameters.Any()) + { + locationUri = locationUri.AppendQuery(queryParameters); + } + + // Create the message and populate headers + HttpRequestMessage requestMessage = new HttpRequestMessage(method, locationUri.AbsoluteUri); + + MediaTypeWithQualityHeaderValue acceptType = CreateAcceptHeader(requestVersion, mediaType); + + if (m_excludeUrlsHeader) + { + acceptType.Parameters.Add(new NameValueHeaderValue(VssHttpRequestSettings.ExcludeUrlsHeader, "true")); + } + if (m_lightweightHeader) + { + acceptType.Parameters.Add(new NameValueHeaderValue(VssHttpRequestSettings.LightweightHeader, "true")); + } + + requestMessage.Headers.Accept.Add(acceptType); + + if (additionalHeaders != null) + { + foreach (KeyValuePair kvp in additionalHeaders) + { + requestMessage.Headers.Add(kvp.Key, kvp.Value); + } + } + + if (content != null) + { + requestMessage.Content = content; + if (requestMessage.Content.Headers.ContentType != null && !requestMessage.Content.Headers.ContentType.Parameters.Any(p => p.Name.Equals(ApiResourceVersionExtensions.c_apiVersionHeaderKey))) + { + // add the api-version to the content header, which will be used by the JsonCompatConverter to know which version of the model to convert to. 
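+                    // (Only added when the content type does not already carry an api-version parameter.)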
+ requestMessage.Content.Headers.ContentType.Parameters.Add(new NameValueHeaderValue(ApiResourceVersionExtensions.c_apiVersionHeaderKey, requestVersion.ToString())); + } + } + + return requestMessage; + } + + protected virtual MediaTypeWithQualityHeaderValue CreateAcceptHeader(ApiResourceVersion requestVersion, String mediaType) + { + MediaTypeWithQualityHeaderValue acceptType = new MediaTypeWithQualityHeaderValue(mediaType); + acceptType.Parameters.AddApiResourceVersionValues(requestVersion, replaceExisting: true, useLegacyFormat: requestVersion.ApiVersion.Major <= 1); + return acceptType; + } + + protected virtual void AddModelAsQueryParams(IList> queryParams, string parameterName, object model) + { + JObject jObject = JObject.FromObject(model, new VssJsonMediaTypeFormatter().CreateJsonSerializer()); + AddModelAsQueryParams(queryParams, parameterName, jObject); + } + + protected virtual void AddIEnumerableAsQueryParams(IList> queryParams, string parameterName, object model) + { + JArray jArray = JArray.FromObject(model, new VssJsonMediaTypeFormatter().CreateJsonSerializer()); + AddModelAsQueryParams(queryParams, parameterName, jArray); + } + + private void AddModelAsQueryParams(IList> queryParams, string parameterName, JObject jObject) + { + foreach (JProperty property in jObject.Properties()) + { + AddModelAsQueryParams(queryParams, parameterName, property); + } + } + + private void AddModelAsQueryParams(IList> queryParams, string key, JProperty property) + { + if (property.Value != null) + { + string newKey = string.Format("{0}[{1}]", key, property.Name); + AddModelAsQueryParams(queryParams, newKey, property.Value); + } + } + + private void AddModelAsQueryParams(IList> queryParams, string key, JArray array) + { + int i = 0; + foreach (JToken childToken in array.Children()) + { + string newKey = string.Format("{0}[{1}]", key, i); + AddModelAsQueryParams(queryParams, newKey, childToken); + i++; + } + } + + private void AddModelAsQueryParams(IList> queryParams, string key, JToken token) + { + if (token.Type == JTokenType.Array) + { + AddModelAsQueryParams(queryParams, key, (JArray)token); + } + else if (token.Type == JTokenType.Object) + { + AddModelAsQueryParams(queryParams, key, (JObject)token); + } + else if (token.Type == JTokenType.Property) + { + AddModelAsQueryParams(queryParams, key, (JProperty)token); + } + else if (token.Type == JTokenType.Date) + { + AddDateTimeToQueryParams(queryParams, key, (DateTime)token); + } + else + { + queryParams.Add(key, token.ToString()); + } + } + + /// + /// Ensures we are using a standard format for sending DateTime value as a query parameter (o: 2015-02-16T16:11:31.1398684Z) + /// + /// + /// + /// local DateTime value + protected void AddDateTimeToQueryParams(IList> queryParams, String name, DateTime localDateTime) + { + // converting to universal time to match json serialization server is using + queryParams.Add(name, localDateTime.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture)); + } + + /// + /// Ensures we are using a standard format for sending DateTime value as a query parameter (o: 2015-02-16T16:11:31.1398684Z) + /// + /// + /// + /// + protected void AddDateTimeToQueryParams(IList> queryParams, String name, DateTimeOffset dateTimeOffset) + { + queryParams.Add(name, dateTimeOffset.ToString("o", CultureInfo.InvariantCulture)); + } + + /// + /// Ensures we are using a standard format (HTTP-date) for sending DateTime value as header + /// (r: Wed, 1 Jan 2016 18:43:31 GMT) per W3C specification. 
+ /// + /// + /// + /// + protected void AddDateTimeToHeaders(IList> queryParams, String name, DateTimeOffset dateTimeOffset) + { + queryParams.Add(name, dateTimeOffset.ToString("r", CultureInfo.InvariantCulture)); + } + + protected async Task SendAsync( + HttpRequestMessage message, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + //ConfigureAwait(false) enables the continuation to be run outside + //any captured SyncronizationContext (such as ASP.NET's) which keeps things + //from deadlocking... + using (HttpResponseMessage response = await this.SendAsync(message, userState, cancellationToken).ConfigureAwait(false)) + { + return await ReadContentAsAsync(response, cancellationToken).ConfigureAwait(false); + } + } + + protected async Task ReadContentAsAsync(HttpResponseMessage response, CancellationToken cancellationToken = default(CancellationToken)) + { + CheckForDisposed(); + Boolean isJson = IsJsonResponse(response); + bool mismatchContentType = false; + try + { + //deal with wrapped collections in json + if (isJson && + typeof(IEnumerable).GetTypeInfo().IsAssignableFrom(typeof(T).GetTypeInfo()) && + !typeof(Byte[]).GetTypeInfo().IsAssignableFrom(typeof(T).GetTypeInfo()) && + !typeof(JObject).GetTypeInfo().IsAssignableFrom(typeof(T).GetTypeInfo())) + { + // expect it to come back wrapped, if it isn't it is a bug! + var wrapper = await ReadJsonContentAsync>(response, cancellationToken).ConfigureAwait(false); + return wrapper.Value; + } + else if (isJson) + { + return await ReadJsonContentAsync(response, cancellationToken).ConfigureAwait(false); + } + } + catch (JsonReaderException) + { + // We thought the content was JSON but failed to parse. + // In this case, do nothing and utilize the HandleUnknownContentType call below + mismatchContentType = true; + } + + if (HasContent(response)) + { + return await HandleInvalidContentType(response, mismatchContentType).ConfigureAwait(false); + } + else + { + return default(T); + } + } + + protected virtual async Task ReadJsonContentAsync(HttpResponseMessage response, CancellationToken cancellationToken = default(CancellationToken)) + { + return await response.Content.ReadAsAsync(new[] { m_formatter }, cancellationToken).ConfigureAwait(false); + } + + protected Task SendAsync( + HttpRequestMessage message, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + // the default in httpClient for HttpCompletionOption is ResponseContentRead so that is what we do here + return this.SendAsync( + message, + /*completionOption:*/ HttpCompletionOption.ResponseContentRead, + userState, + cancellationToken); + } + + protected async Task SendAsync( + HttpRequestMessage message, + HttpCompletionOption completionOption, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + CheckForDisposed(); + if (message.Headers.UserAgent != null) + { + foreach (ProductInfoHeaderValue headerValue in UserAgentUtility.GetDefaultRestUserAgent()) + { + if (!message.Headers.UserAgent.Contains(headerValue)) + { + message.Headers.UserAgent.Add(headerValue); + } + } + } + + VssTraceActivity traceActivity = VssTraceActivity.GetOrCreate(); + using (traceActivity.EnterCorrelationScope()) + { + if (userState != null) + { + message.Properties[UserStatePropertyName] = userState; + } + + if (!message.Headers.Contains(Common.Internal.HttpHeaders.VssE2EID)) + { + message.Headers.Add(Common.Internal.HttpHeaders.VssE2EID, Guid.NewGuid().ToString("D")); 
+ } + VssHttpEventSource.Log.HttpRequestStart(traceActivity, message); + message.Trace(); + message.Properties[VssTraceActivity.PropertyName] = traceActivity; + + // Send the completion option to the inner handler stack so we know when it's safe to buffer + // and when we should avoid buffering. + message.Properties[VssHttpRequestSettings.HttpCompletionOptionPropertyName] = completionOption; + + //ConfigureAwait(false) enables the continuation to be run outside + //any captured SyncronizationContext (such as ASP.NET's) which keeps things + //from deadlocking... + HttpResponseMessage response = await Client.SendAsync(message, completionOption, cancellationToken).ConfigureAwait(false); + + // Inject delay or failure for testing + if (TestDelay != TimeSpan.Zero) + { + await ProcessDelayAsync().ConfigureAwait(false); + } + + await HandleResponseAsync(response, cancellationToken).ConfigureAwait(false); + + return response; + } + } + + [Obsolete("Use VssHttpClientBase.HandleResponseAsync instead")] + protected virtual void HandleResponse(HttpResponseMessage response) + { + + } + + protected virtual async Task HandleResponseAsync( + HttpResponseMessage response, + CancellationToken cancellationToken) + { + response.Trace(); + VssHttpEventSource.Log.HttpRequestStop(VssTraceActivity.Current, response); + + m_LastResponseContext = new VssResponseContext(response.StatusCode, response.Headers); + + if (response.StatusCode == HttpStatusCode.ProxyAuthenticationRequired) + { + throw (m_LastResponseContext.Exception = new ProxyAuthenticationRequiredException()); + } + else if (ShouldThrowError(response)) + { + Exception exToThrow = null; + if (IsJsonResponse(response)) + { + exToThrow = await UnwrapExceptionAsync(response.Content, cancellationToken).ConfigureAwait(false); + } + + if (exToThrow == null || !(exToThrow is VssException)) + { + String message = null; + if (exToThrow != null) + { + message = exToThrow.Message; + } + + IEnumerable serviceError; + if (response.Headers.TryGetValues(Common.Internal.HttpHeaders.TfsServiceError, out serviceError)) + { + message = UriUtility.UrlDecode(serviceError.FirstOrDefault()); + } + else if (String.IsNullOrEmpty(message) && !String.IsNullOrEmpty(response.ReasonPhrase)) + { + message = response.ReasonPhrase; + } + exToThrow = new VssServiceResponseException(response.StatusCode, message, exToThrow); + } + + m_LastResponseContext.Exception = exToThrow; + throw exToThrow; + } + } + + protected async Task UnwrapExceptionAsync(HttpContent content, CancellationToken cancellationToken) + { + WrappedException wrappedException = await content.ReadAsAsync(new MediaTypeFormatter[] { m_formatter }, cancellationToken).ConfigureAwait(false); + return wrappedException.Unwrap(this.TranslatedExceptions); + } + + protected virtual bool ShouldThrowError(HttpResponseMessage response) + { + return !response.IsSuccessStatusCode; + } + + /// + /// Negotiate the appropriate request version to use for the given api resource location, based on + /// the client and server capabilities + /// + /// Id of the API resource location + /// Client version to attempt to use (use the latest VSS API version if unspecified) + /// Max API version supported on the server that is less than or equal to the client version. Returns null if the server does not support this location or this version of the client. 
+ protected async Task NegotiateRequestVersionAsync( + Guid locationId, + ApiResourceVersion version = null, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + ApiResourceLocation location = await GetResourceLocationAsync(locationId, userState, cancellationToken).ConfigureAwait(false); + if (location == null) + { + return null; + } + else + { + return NegotiateRequestVersion(location, version); + } + } + + /// + /// Negotiate the appropriate request version to use for the given api resource location, based on + /// the client and server capabilities + /// + /// Location of the API resource + /// Client version to attempt to use (use the latest VSS API version if unspecified) + /// Max API version supported on the server that is less than or equal to the client version. Returns null if the server does not support this location or this version of the client. + protected ApiResourceVersion NegotiateRequestVersion( + ApiResourceLocation location, + ApiResourceVersion version = null) + { + if (version == null) + { + version = m_defaultApiVersion; + } + + if (location.MinVersion > version.ApiVersion) + { + // Client is older than the server. The server no longer supports this resource (deprecated). + return null; + } + else if (location.MaxVersion < version.ApiVersion) + { + // Client is newer than the server. Negotiate down to the latest version on the server + ApiResourceVersion negotiatedVersion = new ApiResourceVersion(location.MaxVersion, 0); + negotiatedVersion.IsPreview = location.ReleasedVersion < location.MaxVersion; + return negotiatedVersion; + } + else + { + // We can send at the requested api version. Make sure the resource version is not bigger than what the server supports + int resourceVersion = Math.Min(version.ResourceVersion, location.ResourceVersion); + ApiResourceVersion negotiatedVersion = new ApiResourceVersion(version.ApiVersion, resourceVersion); + if (location.ReleasedVersion < version.ApiVersion) + { + negotiatedVersion.IsPreview = true; + } + else + { + negotiatedVersion.IsPreview = version.IsPreview; + } + return negotiatedVersion; + } + } + + /// + /// Sets the ApiResourceLocationCollection for this VssHttpClientBase. + /// If unset and needed, the data will be fetched through an OPTIONS request. + /// + public void SetResourceLocations(ApiResourceLocationCollection resourceLocations) + { + if (null == m_resourceLocations) + { + m_resourceLocations = resourceLocations; + } + } + + /// + /// Adds the excludeUrls=true accept header to the requests generated by this client. + /// If respected by the server, urls will not be included in the responses. + /// + public bool ExcludeUrlsHeader + { + get + { + return m_excludeUrlsHeader; + } + + set + { + m_excludeUrlsHeader = value; + } + } + + + /// + /// Add the lightWeight=true option to the accept header in the requests generated by this client. + /// If respected by the server, light weight responses carrying only basic metadata information + /// will be returned and urls will be excluded. 
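To make the negotiation branches above concrete, a hypothetical helper; the location values quoted in the comments are invented for illustration:

    // Assumed to live inside a class that derives from VssHttpClientBase.
    private ApiResourceVersion PickVersion(ApiResourceLocation location)
    {
        ApiResourceVersion requested = new ApiResourceVersion(5.1);
        ApiResourceVersion negotiated = NegotiateRequestVersion(location, requested);

        // If the location advertises MinVersion 1.0, MaxVersion 4.1, ReleasedVersion 4.0,
        // the result is 4.1 with IsPreview == true (the server is older and 4.1 is unreleased).
        // If MinVersion were greater than 5.1, the result would be null (resource deprecated).
        // If MaxVersion were 5.1 or higher, the requested 5.1 is kept and the resource version
        // is capped at location.ResourceVersion.
        return negotiated;
    }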
+ /// + public bool LightweightHeader + { + get + { + return m_lightweightHeader; + } + set + { + m_lightweightHeader = value; + } + } + + /// + /// Get information about an API resource location by its location id + /// + /// Id of the API resource location + /// + protected async Task GetResourceLocationAsync( + Guid locationId, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + CheckForDisposed(); + await EnsureResourceLocationsPopulated(userState, cancellationToken).ConfigureAwait(false); + return m_resourceLocations.TryGetLocationById(locationId); + } + + internal virtual async Task> GetResourceLocationsAsync( + Boolean allHostTypes, + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + CheckForDisposed(); + // Send an Options request to retrieve all api resource locations if the collection of resource locations is not populated. + Uri optionsUri = VssHttpUriUtility.ConcatUri(BaseAddress, allHostTypes ? c_optionsRelativePathWithAllHostTypes : c_optionsRelativePath); + using (HttpRequestMessage optionsRequest = new HttpRequestMessage(HttpMethod.Options, optionsUri)) + { + return await SendAsync>(optionsRequest, userState, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + internal async Task EnsureResourceLocationsPopulated( + Object userState = null, + CancellationToken cancellationToken = default(CancellationToken)) + { + if (m_resourceLocations == null) + { + // Send an Options request to retrieve all api resource locations if the collection of resource locations is not populated. + Uri optionsUri = VssHttpUriUtility.ConcatUri(BaseAddress, c_optionsRelativePath); + IEnumerable locations = await GetResourceLocationsAsync(allHostTypes: false, userState: userState, cancellationToken: cancellationToken).ConfigureAwait(false); + ApiResourceLocationCollection resourceLocations = new ApiResourceLocationCollection(); + resourceLocations.AddResourceLocations(locations); + m_resourceLocations = resourceLocations; + } + } + + private Boolean HasContent(HttpResponseMessage response) + { + if (response != null && + response.StatusCode != HttpStatusCode.NoContent && + response.RequestMessage?.Method != HttpMethod.Head && + response.Content?.Headers != null && + (!response.Content.Headers.ContentLength.HasValue || + (response.Content.Headers.ContentLength.HasValue && response.Content.Headers.ContentLength != 0))) + { + return true; + } + + return false; + } + + private Boolean IsJsonResponse( + HttpResponseMessage response) + { + if (HasContent(response) + && response.Content.Headers != null && response.Content.Headers.ContentType != null + && !String.IsNullOrEmpty(response.Content.Headers.ContentType.MediaType)) + { + return (0 == String.Compare("application/json", response.Content.Headers.ContentType.MediaType, StringComparison.OrdinalIgnoreCase)); + } + + return false; + } + + private async Task HandleInvalidContentType(HttpResponseMessage response, bool isMismatchedContentType) + { + //the response is not Json, cannot read it with Json formatter, get the string and throw an exception + String responseType = response.Content?.Headers?.ContentType?.MediaType ?? 
"Unknown"; + using (var responseStream = await response.Content.ReadAsStreamAsync().ConfigureAwait(false)) + { + using (var streamReader = new StreamReader(responseStream)) + { + //read at most 4K + const int oneK = 1024; + char[] contentBuffer = new char[4 * oneK]; + int contentLength = 0; + for (int i = 0; i < 4; i++) + { + int read = await streamReader.ReadAsync(contentBuffer, i * oneK, oneK).ConfigureAwait(false); + contentLength += read; + if (read < oneK) break; + } + + string responseText; + if (isMismatchedContentType) + { + responseText = $"Mismatched response content type. {responseType} Response Content: {new String(contentBuffer, 0, contentLength)}"; + } + else + { + responseText = $"Invalid response content type: {responseType} Response Content: {new String(contentBuffer, 0, contentLength)}"; + } + + throw new VssServiceResponseException(response.StatusCode, responseText, null); + } + } + } + + private void SetServicePointOptions() + { + if (BaseAddress != null) + { + ServicePoint servicePoint = ServicePointManager.FindServicePoint(BaseAddress); + servicePoint.UseNagleAlgorithm = false; + servicePoint.SetTcpKeepAlive( + enabled: true, + keepAliveTime: c_keepAliveTime, + keepAliveInterval: c_keepAliveInterval); + } + } + + // ServicePoint defaults + private const int c_keepAliveTime = 30000; + private const int c_keepAliveInterval = 5000; + + #region IDisposable Support + private bool m_isDisposed = false; + private object m_disposeLock = new object(); + + [Obsolete("This overload of Dispose has been deprecated. Use the Dispose() method.")] + [EditorBrowsable(EditorBrowsableState.Never)] + protected virtual void Dispose(bool disposing) + { + if (disposing) + { + Dispose(); + } + } + + public void Dispose() + { + if (!m_isDisposed) + { + lock (m_disposeLock) + { + if (!m_isDisposed) + { + m_isDisposed = true; + m_client.Dispose(); + } + } + } + } + + private void CheckForDisposed() + { + if (m_isDisposed) + { + throw new ObjectDisposedException(this.GetType().Name); + } + } + #endregion + + protected IEnumerable GetHeaderValue(HttpResponseMessage response, string headerName) + { + IEnumerable headerValue; + if (!response.Headers.TryGetValues(headerName, out headerValue)) + { + if (response.Content != null) + { + response.Content.Headers.TryGetValues(headerName, out headerValue); + } + } + return headerValue; + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static TimeSpan TestDelay { get; set; } + + private async Task ProcessDelayAsync() + { + await Task.Delay(Math.Abs((Int32)TestDelay.TotalMilliseconds)).ConfigureAwait(false); + if (TestDelay < TimeSpan.Zero) + { + throw new Exception("User injected failure."); + } + } + + /// + /// Internal for testing only. + /// + internal bool HasResourceLocations + { + get + { + return m_resourceLocations != null; + } + } + + private readonly HttpClient m_client; + private MediaTypeFormatter m_formatter; + private VssResponseContext m_LastResponseContext; + private ApiResourceLocationCollection m_resourceLocations; + private ApiResourceVersion m_defaultApiVersion = new ApiResourceVersion(1.0); + + /// + /// Client option to suppress the generation of links in the responses for the requests made by this client. + /// If set, "excludeUrls=true" will be appended to the Accept header of the request. + /// + private bool m_excludeUrlsHeader; + + /// + /// Client option to generate lightweight responses that carry only basic metadata information for the + /// requests made by this client. Links should not be generated either. 
+ /// If set, "lightweight=true" will be appended to the Accept header of the request. + /// + private bool m_lightweightHeader; + + private Lazy s_patchMethod = new Lazy(() => new HttpMethod("PATCH")); + + /// + /// This is only needed for the Options request that we are making right now. Eventually + /// we will use the Location Service and the Options request will not be needed and we can remove this. + /// + private const String c_optionsRelativePath = "_apis/"; + + private const String c_optionsRelativePathWithAllHostTypes = "_apis/?allHostTypes=true"; + + private const String c_jsonMediaType = "application/json"; + + public readonly static String UserStatePropertyName = "VssClientBaseUserState"; + + protected sealed class OperationScope : IDisposable + { + public OperationScope( + String area, + String operation) + { + m_area = area; + m_operation = operation; + m_activity = VssTraceActivity.GetOrCreate(); + m_correlationScope = m_activity.EnterCorrelationScope(); + VssHttpEventSource.Log.HttpOperationStart(m_activity, m_area, operation); + } + + public void Dispose() + { + if (!m_disposed) + { + m_disposed = true; + VssHttpEventSource.Log.HttpOperationStop(m_activity, m_area, m_operation); + + if (m_correlationScope != null) + { + m_correlationScope.Dispose(); + m_correlationScope = null; + } + } + } + + private String m_area; + private String m_operation; + private Boolean m_disposed; + private VssTraceActivity m_activity; + private IDisposable m_correlationScope; + } + + } +} diff --git a/src/Sdk/WebApi/WebApi/VssHttpUriUtility.cs b/src/Sdk/WebApi/WebApi/VssHttpUriUtility.cs new file mode 100644 index 00000000000..e3d6f858013 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssHttpUriUtility.cs @@ -0,0 +1,229 @@ +using System; +using System.Collections.Generic; +using System.ComponentModel; +using System.Text; + +namespace GitHub.Services.WebApi +{ + public static class VssHttpUriUtility + { + /// + /// Replace values in a templated route with the given route values dictionary. + /// + /// + /// + /// Set true to escape the replaced route Uri string prior to returning it + /// Set true to append any unused routeValues as query parameters to the returned route + /// If set to true requires all the route parameters to be explicitly passed in routeParams + /// + public static String ReplaceRouteValues( + String routeTemplate, + Dictionary routeValues, + bool escapeUri = false, + bool appendUnusedAsQueryParams = false, + bool requireExplicitRouteParams = false) + { + RouteReplacementOptions routeReplacementOptions = escapeUri ? RouteReplacementOptions.EscapeUri : 0; + routeReplacementOptions |= appendUnusedAsQueryParams ? RouteReplacementOptions.AppendUnusedAsQueryParams : 0; + routeReplacementOptions |= requireExplicitRouteParams ? RouteReplacementOptions.RequireExplicitRouteParams : 0; + + return ReplaceRouteValues( + routeTemplate, + routeValues, + routeReplacementOptions); + } + + /// + /// Replace values in a templated route with the given route values dictionary. 
+ /// + [EditorBrowsable(EditorBrowsableState.Never)] + public static String ReplaceRouteValues( + String routeTemplate, + Dictionary routeValues, + RouteReplacementOptions routeReplacementOptions) + { + StringBuilder sbResult = new StringBuilder(); + StringBuilder sbCurrentPathPart = new StringBuilder(); + int paramStart = -1, paramLength = 0; + bool insideParam = false; + HashSet unusedValues = new HashSet(routeValues.Keys, StringComparer.OrdinalIgnoreCase); + Dictionary caseIncensitiveRouteValues = new Dictionary(routeValues, StringComparer.OrdinalIgnoreCase); + + for (int i = 0; i < routeTemplate.Length; i++) + { + char c = routeTemplate[i]; + + if (insideParam) + { + if (c == '}') + { + insideParam = false; + String paramName = routeTemplate.Substring(paramStart, paramLength); + paramLength = 0; + if (paramName.StartsWith("*")) + { + if (routeReplacementOptions.HasFlag(RouteReplacementOptions.WildcardAsQueryParams)) + { + continue; + } + // wildcard route + paramName = paramName.Substring(1); + } + + Object paramValue; + if (caseIncensitiveRouteValues.TryGetValue(paramName, out paramValue)) + { + if (paramValue != null) + { + sbCurrentPathPart.Append(paramValue.ToString()); + unusedValues.Remove(paramName); + } + } + else if (routeReplacementOptions.HasFlag(RouteReplacementOptions.RequireExplicitRouteParams)) + { + throw new ArgumentException("Missing route param " + paramName); + } + } + else + { + paramLength++; + } + } + else + { + if (c == '/') + { + if (sbCurrentPathPart.Length > 0) + { + sbResult.Append('/'); + sbResult.Append(sbCurrentPathPart.ToString()); + sbCurrentPathPart.Clear(); + } + } + else if (c == '{') + { + if ((i + 1) < routeTemplate.Length && routeTemplate[i + 1] == '{') + { + // Escaped '{' + sbCurrentPathPart.Append(c); + i++; + } + else + { + insideParam = true; + paramStart = i + 1; + } + } + else if (c == '}') + { + sbCurrentPathPart.Append(c); + if ((i + 1) < routeTemplate.Length && routeTemplate[i + 1] == '}') + { + // Escaped '}' + i++; + } + } + else + { + sbCurrentPathPart.Append(c); + } + } + } + + if (sbCurrentPathPart.Length > 0) + { + sbResult.Append('/'); + sbResult.Append(sbCurrentPathPart.ToString()); + } + + if (routeReplacementOptions.HasFlag(RouteReplacementOptions.EscapeUri)) + { + sbResult = new StringBuilder(Uri.EscapeUriString(sbResult.ToString())); + } + + if (routeReplacementOptions.HasFlag(RouteReplacementOptions.AppendUnusedAsQueryParams) && unusedValues.Count > 0) + { + bool isFirst = true; + + foreach (String paramName in unusedValues) + { + Object paramValue; + if (caseIncensitiveRouteValues.TryGetValue(paramName, out paramValue) && paramValue != null) + { + sbResult.Append(isFirst ? '?' : '&'); + isFirst = false; + sbResult.Append(Uri.EscapeDataString(paramName)); + sbResult.Append('='); + sbResult.Append(Uri.EscapeDataString(paramValue.ToString())); + } + } + } + + return sbResult.ToString(); + } + + /// + /// Create a route values dictionary, and add the specified area and resource if they aren't present. 
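For reference, a small usage sketch of the replacement logic above; the template and values are illustrative only:

    String requestPath = VssHttpUriUtility.ReplaceRouteValues(
        "_apis/{area}/{resource}/{id}",
        new Dictionary<String, Object>
        {
            { "area", "build" },
            { "resource", "builds" },
            { "id", 42 },
            { "api-version", "5.1" }   // not in the template, so it is appended as a query parameter
        },
        escapeUri: true,
        appendUnusedAsQueryParams: true);
    // requestPath is "/_apis/build/builds/42?api-version=5.1"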
+ /// + /// + /// Area name + /// Resource name + /// + public static Dictionary ToRouteDictionary(Object routeValues, string area, string resourceName) + { + Dictionary valuesDictionary = VssHttpUriUtility.ToRouteDictionary(routeValues); + VssHttpUriUtility.AddRouteValueIfNotPresent(valuesDictionary, "area", area); + VssHttpUriUtility.AddRouteValueIfNotPresent(valuesDictionary, "resource", resourceName); + + return valuesDictionary; + } + + public static Uri ConcatUri(Uri baseUri, String relativeUri) + { + StringBuilder sbCombined = new StringBuilder(baseUri.GetLeftPart(UriPartial.Path).TrimEnd('/')); + sbCombined.Append('/'); + sbCombined.Append(relativeUri.TrimStart('/')); + sbCombined.Append(baseUri.Query); + return new Uri(sbCombined.ToString()); + } + + public static Dictionary ToRouteDictionary(Object values) + { + if (values == null) + { + return new Dictionary(); + } + else if (values is Dictionary) + { + return (Dictionary)values; + } + else + { + Dictionary dictionary = new Dictionary(); + foreach (PropertyDescriptor descriptor in TypeDescriptor.GetProperties(values)) + { + dictionary[descriptor.Name] = descriptor.GetValue(values); + } + return dictionary; + } + } + private static void AddRouteValueIfNotPresent(Dictionary dictionary, String key, Object value) + { + if (!dictionary.ContainsKey(key)) + { + dictionary.Add(key, value); + } + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + [Flags] + public enum RouteReplacementOptions + { + None = 0, + EscapeUri = 1, + AppendUnusedAsQueryParams = 2, + RequireExplicitRouteParams = 4, + WildcardAsQueryParams = 8, + } +} diff --git a/src/Sdk/WebApi/WebApi/VssJsonCollectionWrapper.cs b/src/Sdk/WebApi/WebApi/VssJsonCollectionWrapper.cs new file mode 100644 index 00000000000..e4a2365b62f --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssJsonCollectionWrapper.cs @@ -0,0 +1,119 @@ +using System; +using System.Collections; +using System.Linq; +using System.Runtime.Serialization; + +namespace GitHub.Services.WebApi +{ + + [DataContract] + public abstract class VssJsonCollectionWrapperBase : ISecuredObject + { + protected VssJsonCollectionWrapperBase() + { + } + + public VssJsonCollectionWrapperBase(IEnumerable source) + { + if (source == null) + { + this.Count = 0; + } + else if (source is ICollection) + { + this.Count = ((ICollection)source).Count; + } + else + { + this.Count = source.Cast().Count(); + } + this._value = source; + } + + [DataMember(Order=0)] + public Int32 Count { get; private set; } + + //not serialized from here, see sub class... 
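As a hedged illustration of what the sealed wrappers defined just below put on the wire (assuming the camel-cased serializer settings configured by VssJsonMediaTypeFormatter later in this patch):

    var wrapper = new VssJsonCollectionWrapper(new[] { "a", "b", "c" });
    // Count comes from ICollection.Count, so the wrapper serializes as an object, never a bare array:
    //   {"count":3,"value":["a","b","c"]}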
+ private IEnumerable _value; + + protected IEnumerable BaseValue + { + get + { + return _value; + } + set + { + _value = value; + } + } + + #region ISecuredObject + Guid ISecuredObject.NamespaceId => throw new NotImplementedException(); + + int ISecuredObject.RequiredPermissions => throw new NotImplementedException(); + + string ISecuredObject.GetToken() => throw new NotImplementedException(); + #endregion + } + + [DataContract] + public sealed class VssJsonCollectionWrapper : VssJsonCollectionWrapperBase + { + public VssJsonCollectionWrapper() + : base() + { + } + public VssJsonCollectionWrapper(IEnumerable source) + : base(source) + { + } + + [DataMember(Order = 1)] + public IEnumerable Value + { + get + { + return BaseValue; + } + private set + { + BaseValue = value; + } + } + + } + + /// + /// This class is used to serialized collections as a single + /// JSON object on the wire, to avoid serializing JSON arrays + /// directly to the client, which can be a security hole + /// + /// + [DataContract] + public sealed class VssJsonCollectionWrapper : VssJsonCollectionWrapperBase + { + public VssJsonCollectionWrapper() + : base() + { + } + + public VssJsonCollectionWrapper(IEnumerable source) + :base (source) + { + } + + [DataMember] + public T Value + { + get + { + return (T)BaseValue; + } + private set + { + BaseValue = (IEnumerable)value; + } + } + } +} diff --git a/src/Sdk/WebApi/WebApi/VssJsonCreationConverter.cs b/src/Sdk/WebApi/WebApi/VssJsonCreationConverter.cs new file mode 100644 index 00000000000..7c50ddbcbc8 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssJsonCreationConverter.cs @@ -0,0 +1,39 @@ +using System; +using System.Reflection; +using Newtonsoft.Json; +using Newtonsoft.Json.Linq; + +namespace GitHub.Services.WebApi +{ + public abstract class VssJsonCreationConverter : VssSecureJsonConverter + where T : class + { + protected abstract T Create(Type objectType, JObject jsonObject); + + public override bool CanConvert(Type objectType) + { + return typeof(T).GetTypeInfo().IsAssignableFrom(objectType.GetTypeInfo()); + } + + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + if (reader.TokenType == JsonToken.Null) + { + return null; + } + + var jsonObject = JObject.Load(reader); + var target = Create(objectType, jsonObject); + serializer.Populate(jsonObject.CreateReader(), target); + return target; + } + + // fallback to default behavior for writes + public override bool CanWrite => false; + + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + throw new NotImplementedException(); + } + } +} diff --git a/src/Sdk/WebApi/WebApi/VssJsonMediaTypeFormatter.cs b/src/Sdk/WebApi/WebApi/VssJsonMediaTypeFormatter.cs new file mode 100644 index 00000000000..d4a711bfccd --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssJsonMediaTypeFormatter.cs @@ -0,0 +1,206 @@ +using System; +using System.Collections; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Formatting; +using System.Net.Http.Headers; +using System.Reflection; +using System.Runtime.Serialization; +using System.Threading.Tasks; +using GitHub.Services.Common; +using GitHub.Services.WebApi.Patch; +using Newtonsoft.Json; +using Newtonsoft.Json.Converters; +using Newtonsoft.Json.Linq; +using Newtonsoft.Json.Serialization; + +namespace GitHub.Services.WebApi +{ + /// + /// Constants related to JSON serialization customizations + /// + public static class 
VssJsonSerializationConstants + { + /// + /// Header which indicates to serialize enums as numbers. + /// + public const string EnumsAsNumbersHeader = "enumsAsNumbers"; + + /// + /// Header which indicates to serialize dates using the Microsoft Ajax date format + /// + public const string MsDateFormatHeader = "msDateFormat"; + + /// + /// Header which indicates to return a root array in a JSON response rather than wrapping it in an object + /// + public const string NoArrayWrapHeader = "noArrayWrap"; + } + + + public class VssJsonMediaTypeFormatter : JsonMediaTypeFormatter + { + /// + /// + /// + /// This should typically be false. A true value will cause the wrapping to be skipped which is neccesary when creating ObjectContent from arrays on client to prepare a request + public VssJsonMediaTypeFormatter(bool bypassSafeArrayWrapping = false) + : this(bypassSafeArrayWrapping, false, false) + { + } + + public VssJsonMediaTypeFormatter(bool bypassSafeArrayWrapping, bool enumsAsNumbers = false, bool useMsDateFormat = false) + { + this.SetSerializerSettings(bypassSafeArrayWrapping, enumsAsNumbers, useMsDateFormat); + } + + /// + /// + /// + /// This should typically be false. A true value will cause the wrapping to be skipped which is neccesary when creating ObjectContent from arrays on client to prepare a request + public VssJsonMediaTypeFormatter(HttpRequestMessage request, bool bypassSafeArrayWrapping = false) + { + Request = request; + SerializerSettings.Context = new StreamingContext(0, Request); + + bool enumsAsNumbers = String.Equals("true", GetAcceptHeaderOptionValue(request, VssJsonSerializationConstants.EnumsAsNumbersHeader), StringComparison.OrdinalIgnoreCase); + bool useMsDateFormat = String.Equals("true", GetAcceptHeaderOptionValue(request, VssJsonSerializationConstants.MsDateFormatHeader), StringComparison.OrdinalIgnoreCase); + if (!bypassSafeArrayWrapping) + { + // We can override the array-wrapping behavior based on a header. We haven't supported Firefox pre-2.0 in years, and even then the array prototype exploit + // is not possible if you have to send a custom header in the request to get a JSON array to be returned. + bypassSafeArrayWrapping = String.Equals("true", GetAcceptHeaderOptionValue(request, VssJsonSerializationConstants.NoArrayWrapHeader), StringComparison.OrdinalIgnoreCase); + } + + this.SetSerializerSettings(bypassSafeArrayWrapping, enumsAsNumbers, useMsDateFormat); + } + + private void SetSerializerSettings(bool bypassSafeArrayWrapping, bool enumsAsNumbers, bool useMsDateFormat) + { + this.SerializerSettings.ContractResolver = GetContractResolver(enumsAsNumbers); + + if (!enumsAsNumbers) + { + // Serialze enums as camelCased string values + this.SerializerSettings.Converters.Add(new StringEnumConverter { CamelCaseText = true }); + } + + if (useMsDateFormat) + { + this.SerializerSettings.DateFormatHandling = DateFormatHandling.MicrosoftDateFormat; + } + + m_bypassSafeArrayWrapping = bypassSafeArrayWrapping; + + EnumsAsNumbers = enumsAsNumbers; + UseMsDateFormat = useMsDateFormat; + } + + protected virtual IContractResolver GetContractResolver(bool enumsAsNumbers) + { + if (enumsAsNumbers) + { + return new VssCamelCasePropertyNamesPreserveEnumsContractResolver(); + } + else + { + return new VssCamelCasePropertyNamesContractResolver(); + } + } + + protected HttpRequestMessage Request { get; private set; } + + /// + /// Whether or not to wrap a root array into an object with a "value" property equal to the array. 
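A minimal sketch of how a caller might opt into these accept-header switches; the request URI is illustrative, and the header parameter names come from VssJsonSerializationConstants above:

    var request = new HttpRequestMessage(HttpMethod.Get, "https://example.test/_apis/widgets");
    var accept = new MediaTypeWithQualityHeaderValue("application/json");
    accept.Parameters.Add(new NameValueHeaderValue(VssJsonSerializationConstants.EnumsAsNumbersHeader, "true"));
    accept.Parameters.Add(new NameValueHeaderValue(VssJsonSerializationConstants.MsDateFormatHeader, "true"));
    request.Headers.Accept.Add(accept);
    // A service host that builds its per-request formatter from the request-based constructor above
    // (reached via GetPerRequestFormatterInstance) reads these parameters through
    // GetAcceptHeaderOptionValue and emits enums as numbers and dates in the Microsoft AJAX format.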
+ /// This protects against an old browser vulnerability (Firefox 2.0) around overriding the 'Array' + /// prototype and referencing a REST endpoint through in a script tag, and stealing the results + /// cross-origin. + /// + public Boolean BypassSafeArrayWrapping + { + get + { + return m_bypassSafeArrayWrapping; + } + set + { + m_bypassSafeArrayWrapping = value; + } + } + + /// + /// True if enums are serialized as numbers rather than user-friendly strings + /// + public Boolean EnumsAsNumbers { get; private set; } + + /// + /// True if dates are to be emitted using MSJSON format rather than ISO format. + /// + public Boolean UseMsDateFormat { get; private set; } + + public override MediaTypeFormatter GetPerRequestFormatterInstance(Type type, HttpRequestMessage request, MediaTypeHeaderValue mediaType) + { + if (GetType().Equals(typeof(VssJsonMediaTypeFormatter))) // ensures we don't return a VssJsonMediaTypeFormatter when this instance is not a VssJsonMediaTypeFormatter + { + return new VssJsonMediaTypeFormatter(request, m_bypassSafeArrayWrapping); + } + else + { + return base.GetPerRequestFormatterInstance(type, request, mediaType); // basically returns this instance + } + } + + private String GetAcceptHeaderOptionValue(HttpRequestMessage request, String acceptOptionName) + { + foreach (var header in request.Headers.Accept) + { + foreach (var parameter in header.Parameters) + { + if (String.Equals(parameter.Name, acceptOptionName, StringComparison.OrdinalIgnoreCase)) + { + return parameter.Value; + } + } + } + + return null; + } + + /// + /// Because JSON PATCH and JSON both use the JSON format, we explicitly are + /// blocking the default JSON formatter from being able to read the PATCH + /// format. + /// + public override bool CanReadType(Type type) + { + return !type.IsOfType(typeof(IPatchDocument<>)); + } + + public override Task WriteToStreamAsync(Type type, object value, Stream writeStream, HttpContent content, TransportContext transportContext) + { + // Do not wrap byte arrays as this is incorrect behavior (they are written as base64 encoded strings and + // not as array objects like other types). + + Type typeToWrite = type; + if (!m_bypassSafeArrayWrapping + && typeof(IEnumerable).GetTypeInfo().IsAssignableFrom(type.GetTypeInfo()) + && !type.Equals(typeof(Byte[])) + && !type.Equals(typeof(JObject))) + { + typeToWrite = typeof(VssJsonCollectionWrapper); + + // IEnumerable will need to be materialized if they are currently not. + object materializedValue = value is ICollection || value is string ? + value : // Use the regular input if it is already materialized or it is a string + ((IEnumerable)value)?.Cast().ToList() ?? 
value; // Otherwise, try materialize it + + value = new VssJsonCollectionWrapper((IEnumerable)materializedValue); + } + return base.WriteToStreamAsync(typeToWrite, value, writeStream, content, transportContext); + } + + private bool m_bypassSafeArrayWrapping; + } +} diff --git a/src/Sdk/WebApi/WebApi/VssRequestTimerTrace.cs b/src/Sdk/WebApi/WebApi/VssRequestTimerTrace.cs new file mode 100644 index 00000000000..86826b0b3c6 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssRequestTimerTrace.cs @@ -0,0 +1,181 @@ +using GitHub.Services.Common; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Runtime.InteropServices; + +namespace GitHub.Services.WebApi +{ + internal static class HttpMessageExtensions + { + private const string tracerKey = "VSS_HTTP_TIMER_TRACE"; + + internal static void Trace(this HttpRequestMessage request) + { + Object tracerObj = null; + VssRequestTimerTrace tracer = null; + if (request.Properties.TryGetValue(tracerKey, out tracerObj)) + { + tracer = tracerObj as VssRequestTimerTrace; + Debug.Assert(tracer != null, "Tracer object is the wrong type!"); + } + else + { + tracer = new VssRequestTimerTrace(); + request.Properties[tracerKey] = tracer; + } + + if (tracer != null) + { + tracer.TraceRequest(request); + } + } + + internal static void Trace(this HttpResponseMessage response) + { + Object tracerObj = null; + VssRequestTimerTrace tracer = null; + if (response.RequestMessage.Properties.TryGetValue(tracerKey, out tracerObj)) + { + tracer = tracerObj as VssRequestTimerTrace; + Debug.Assert(tracer != null, "Tracer object is the wrong type!"); + } + + if (tracer != null) + { + tracer.TraceResponse(response); + } + } + } + + // a little class to trace perf of web requests + // does nothing without TRACE set + internal class VssRequestTimerTrace + { + internal VssRequestTimerTrace() + { +#if TRACE + _requestTimer = new Stopwatch(); +#endif + } + internal void TraceRequest(HttpRequestMessage message) + { +#if TRACE + string requestString = message.GetRequestString(); + + VssPerformanceEventSource.Log.RESTStart(Guid.Empty, requestString); + _requestTimer.Start(); + +#if !NETSTANDARD + EventActivityIdControl(1, ref _activityId); +#endif +#endif + } + internal void TraceResponse(HttpResponseMessage response) + { +#if TRACE + _requestTimer.Stop(); + String responseString = response.GetResponseString(_requestTimer.ElapsedMilliseconds); + +#if !NETSTANDARD + VssPerformanceEventSource.Log.RESTStop(Guid.Empty, _activityId, responseString, _requestTimer.ElapsedMilliseconds); +#endif +#endif + } +#if TRACE + private Stopwatch _requestTimer; +#if !NETSTANDARD + private Guid _activityId; +#endif +#endif + +#if !NETSTANDARD + [DllImport("ADVAPI32.DLL", ExactSpelling = true, EntryPoint = "EventActivityIdControl")] + internal static extern uint EventActivityIdControl([In] int ControlCode, [In][Out] ref Guid ActivityId); +#endif + } + +#if TRACE + internal static class VssRequestLoggingExtensions + { + internal static String GetRequestString(this HttpRequestMessage message) + { + String verb, area, resource; + Guid vssE2EId; + + TryGetHeaderGuid(message.Headers, Common.Internal.HttpHeaders.VssE2EID, out vssE2EId); + + ExtractRequestStrings(message, out verb, out resource, out area); + + return String.Format(CultureInfo.InvariantCulture, _requestFormat, message.RequestUri.AbsoluteUri, verb, resource, area, vssE2EId); + } + + internal static String 
GetResponseString(this HttpResponseMessage response, long milliseconds) + { + String verb, area, resource; + Guid activityId = Guid.Empty, vssE2EId = Guid.Empty; + + ExtractRequestStrings(response.RequestMessage, out verb, out resource, out area); + + TryGetHeaderGuid(response.Headers, Common.Internal.HttpHeaders.VssE2EID, out vssE2EId); + TryGetHeaderGuid(response.Headers, Common.Internal.HttpHeaders.ActivityId, out activityId); + + return String.Format(CultureInfo.InvariantCulture, _responseFormat, response.RequestMessage.RequestUri.AbsoluteUri, verb, resource, area, vssE2EId, activityId, milliseconds); + } + + private static void ExtractRequestStrings(HttpRequestMessage message, out String verb, out String resource, out String area) + { + verb = message.Method.ToString().ToUpper(); + resource = _unknown; + area = _unknown; + + int segments = message.RequestUri.Segments.Length; + + if (segments > 0) + { + //if we did our REST APIs right the resource had better be the last + //segment. + resource = message.RequestUri.Segments[segments - 1].TrimEnd('/'); + } + + for (int i = 0; i < segments; i++) + { + //area should be the first segment after _apis + //some resources don't have an area, so it will be the same + //which is OK, we'll know what it means :) + if (String.Compare(message.RequestUri.Segments[i], _apis, StringComparison.OrdinalIgnoreCase) == 0) + { + if (segments > (i + 1)) + { + area = message.RequestUri.Segments[i + 1].TrimEnd('/'); + } + break; + } + } + } + + private static bool TryGetHeaderGuid(HttpHeaders headers, string key, out Guid value) + { + IEnumerable values; + value = Guid.Empty; + if (headers.TryGetValues(key, out values)) + { + return Guid.TryParse(values.FirstOrDefault(), out value); + } + + return false; + } + + //[URI] (VERB)RESOURCE[AREA] E2EId: E2EId + private const String _requestFormat = "Web method running: [{0}] ({1}){2}[{3}] E2EId: {4}"; + //[URI] (VERB)RESOURCE[AREA] E2EId: E2EId, ActivityId: ActivityId N ms + private const String _responseFormat = "Web method response: [{0}] ({1}){2}[{3}] E2EId: {4}, ActivityId: {5} {6} ms"; + private const String _unknown = ""; + private const String _apis = "_apis/"; + } +#endif +} diff --git a/src/Sdk/WebApi/WebApi/VssResponseContext.cs b/src/Sdk/WebApi/WebApi/VssResponseContext.cs new file mode 100644 index 00000000000..e3060f2f278 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssResponseContext.cs @@ -0,0 +1,74 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http.Headers; +using System.Text.RegularExpressions; +using GitHub.Services.Common; +using Newtonsoft.Json; + +namespace GitHub.Services.WebApi +{ + public class VssResponseContext + { + internal VssResponseContext(HttpStatusCode statusCode, HttpResponseHeaders headers) + { + if (headers.Contains(Common.Internal.HttpHeaders.ActivityId)) + { + IEnumerable values = headers.GetValues(Common.Internal.HttpHeaders.ActivityId); + string activityId = values.FirstOrDefault(); + Guid result; + Guid.TryParse(activityId, out result); + ActivityId = result; + } + + IEnumerable headerValues; + if (headers.TryGetValues(PerformanceTimerConstants.Header, out headerValues)) + { + Timings = JsonConvert.DeserializeObject>(headerValues.First()); + } + + HttpStatusCode = statusCode; + Headers = headers; + } + + public bool TryGetException(out Exception value) + { + value = Exception; + return Exception != null; + } + + public bool TryGetErrorCode(out string value) + { + value = null; + if (Exception == null) + { + return 
false; + } + var message = Exception.Message; + var match = Regex.Match(message, @"(TF[0-9]+)"); + if (match.Success) + { + value = match.Value; + return true; + } + match = Regex.Match(message, @"(VSS[0-9]+)"); + if (match.Success) + { + value = match.Value; + return true; + } + return false; + } + + public HttpStatusCode HttpStatusCode { get; private set; } + + public Guid ActivityId { get; private set; } + + public Exception Exception { get; internal set; } + + public IDictionary Timings { get; private set; } + + public HttpResponseHeaders Headers { get; private set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/VssSecureJsonConverter.cs b/src/Sdk/WebApi/WebApi/VssSecureJsonConverter.cs new file mode 100644 index 00000000000..3280dfb6d76 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssSecureJsonConverter.cs @@ -0,0 +1,59 @@ +using System; +using System.ComponentModel; +using Newtonsoft.Json; +using Newtonsoft.Json.Converters; + +namespace GitHub.Services.WebApi +{ + public abstract class VssSecureJsonConverter : JsonConverter + { + public override abstract bool CanConvert(Type objectType); + + public override abstract object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer); + + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + Validate(value, serializer); + } + + private void Validate(object value, JsonSerializer serializer) + { + VssSecureJsonConverterHelper.Validate?.Invoke(value, serializer); + } + } + + public abstract class VssSecureCustomCreationConverter : CustomCreationConverter + { + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + Validate(value, serializer); + } + + private void Validate(object value, JsonSerializer serializer) + { + VssSecureJsonConverterHelper.Validate?.Invoke(value, serializer); + } + } + + public abstract class VssSecureDateTimeConverterBase : DateTimeConverterBase + { + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + Validate(value, serializer); + } + + private void Validate(object value, JsonSerializer serializer) + { + VssSecureJsonConverterHelper.Validate?.Invoke(value, serializer); + } + } + + [EditorBrowsable(EditorBrowsableState.Never)] + public static class VssSecureJsonConverterHelper + { + /// + /// The action to validate the object being converted. 
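A minimal sketch of wiring the validation hook declared just below; the delegate signature is inferred from the Invoke calls above, and the permission check is a placeholder rather than part of this patch:

    VssSecureJsonConverterHelper.Validate = (value, serializer) =>
    {
        // Placeholder policy: only objects exposing ISecuredObject metadata are inspected here.
        if (value is ISecuredObject secured)
        {
            // e.g. consult secured.NamespaceId / secured.RequiredPermissions before allowing the write
        }
    };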
+ /// + public static Action Validate { get; set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/VssServiceResponseException.cs b/src/Sdk/WebApi/WebApi/VssServiceResponseException.cs new file mode 100644 index 00000000000..473229514a5 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssServiceResponseException.cs @@ -0,0 +1,36 @@ +using System; +using System.Diagnostics.CodeAnalysis; +using System.Net; +using System.Runtime.Serialization; +using System.Security; +using GitHub.Services.Common; + +namespace GitHub.Services.WebApi +{ + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1032:ImplementStandardExceptionConstructors")] + [ExceptionMapping("0.0", "3.0", "VssServiceResponseException", "GitHub.Services.WebApi.VssServiceResponseException, GitHub.Services.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a")] + public class VssServiceResponseException : VssServiceException + { + public VssServiceResponseException(HttpStatusCode code, String message, Exception innerException) + : base(message, innerException) + { + this.HttpStatusCode = code; + } + + protected VssServiceResponseException(SerializationInfo info, StreamingContext context) + : base(info, context) + { + HttpStatusCode = (HttpStatusCode)info.GetInt32("HttpStatusCode"); + } + + [SecurityCritical] + public override void GetObjectData(SerializationInfo info, StreamingContext context) + { + base.GetObjectData(info, context); + info.AddValue("HttpStatusCode", (int)HttpStatusCode); + } + + public HttpStatusCode HttpStatusCode { get; private set; } + } +} diff --git a/src/Sdk/WebApi/WebApi/VssSigningCredentials.cs b/src/Sdk/WebApi/WebApi/VssSigningCredentials.cs new file mode 100644 index 00000000000..70c3ebf05c7 --- /dev/null +++ b/src/Sdk/WebApi/WebApi/VssSigningCredentials.cs @@ -0,0 +1,490 @@ +using System; +using System.Collections.Generic; +using System.Security.Cryptography; +using System.Security.Cryptography.X509Certificates; +using GitHub.Services.Common; +using GitHub.Services.WebApi.Jwt; + +namespace GitHub.Services.WebApi +{ + /// + /// Provides a contract for signing, and verifying signatures of, blobs of data. + /// + public abstract class VssSigningCredentials + { + protected VssSigningCredentials() + { + m_effectiveDate = DateTime.UtcNow; + } + + /// + /// Gets a value indicating whether or not this token may be used to sign data. + /// + public abstract Boolean CanSignData + { + get; + } + + /// + /// Gets the size of the key, in bits, used for signing and verification. + /// + public abstract Int32 KeySize + { + get; + } + + /// + /// Gets the date from which this signing token is valid. + /// + public virtual DateTime ValidFrom + { + get + { + return m_effectiveDate; + } + } + + /// + /// Gets the datetime at which this signing token expires. + /// + public virtual DateTime ValidTo + { + get + { + return DateTime.MaxValue; + } + } + + /// + /// Gets the signature algorithm used by this signing token. + /// + public abstract JWTAlgorithm SignatureAlgorithm + { + get; + } + + /// + /// Signs the array with the signing key associated with the token. + /// + /// The data which should be signed + /// A blob of data representing the signature of the input data + /// Thrown when the current instance cannot be used for signing + public virtual Byte[] SignData(Byte[] input) + { + if (!CanSignData) + { + throw new InvalidOperationException(); + } + + return GetSignature(input); + } + + /// + /// Signs the array with the signing key associated with the token. 
+ /// + /// The data which should be signed + /// A blob of data representing the signature of the input data + protected abstract Byte[] GetSignature(Byte[] input); + + /// + /// Verifies the signature of the input data, returning true if the signature is valid. + /// + /// The data which should be signed + /// The signature which should be verified + /// True if the provided signature matches the current signing token; otherwise, false + public abstract Boolean VerifySignature(Byte[] input, Byte[] signature); + + /// + /// Creates a new VssSigningCredentials instance using the specified instance + /// as the signing key. + /// + /// The certificate which contains the key used for signing and verification + /// A new VssSigningCredentials instance which uses the specified certificate for signing + public static VssSigningCredentials Create(X509Certificate2 certificate) + { + ArgumentUtility.CheckForNull(certificate, nameof(certificate)); + + if (certificate.HasPrivateKey) + { + +// Once we move the ClientObjectModelTargetFrameworkVersion to 4.6 we should remove the #else sections +// in this file. The NETSTANDARD sections should be the code for both NetStandard and desktop. 4.5 does +// not support these new cryptography classes, which is why we need these #ifs for now. +#if NETSTANDARD + var rsa = certificate.GetRSAPrivateKey(); + if (rsa == null) + { + throw new SignatureAlgorithmUnsupportedException(certificate.SignatureAlgorithm.FriendlyName); + } +#else + var rsa = certificate.PrivateKey as RSACryptoServiceProvider; + if (rsa == null) + { + throw new SignatureAlgorithmUnsupportedException(certificate.PrivateKey.SignatureAlgorithm); + } + + if (rsa.CspKeyContainerInfo.ProviderType != 24) + { + throw new SignatureAlgorithmUnsupportedException(rsa.CspKeyContainerInfo.ProviderType); + } +#endif + + if (rsa.KeySize < c_minKeySize) + { + throw new InvalidCredentialsException(JwtResources.SigningTokenKeyTooSmall()); + } + } + + return new X509Certificate2SigningToken(certificate); + } + +#if NETSTANDARD + /// + /// Creates a new VssSigningCredentials instance using the specified + /// callback function to retrieve the signing key. + /// + /// The factory which creates RSA keys used for signing and verification + /// A new VssSigningCredentials instance which uses the specified provider for signing + public static VssSigningCredentials Create(Func factory) + { + ArgumentUtility.CheckForNull(factory, nameof(factory)); + + using (var rsa = factory()) + { + if (rsa == null) + { + throw new InvalidCredentialsException(JwtResources.SignatureAlgorithmUnsupportedException("None")); + } + + if (rsa.KeySize < c_minKeySize) + { + throw new InvalidCredentialsException(JwtResources.SigningTokenKeyTooSmall()); + } + + return new RSASigningToken(factory, rsa.KeySize); + } + } +#else + /// + /// Creates a new VssSigningCredentials instance using the specified + /// callback function to retrieve the signing key. 
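Putting the factory methods above together, a hypothetical signing round-trip; obtaining the certificate is outside the scope of this patch:

    // Requires an X509Certificate2 whose RSA private key is at least 2048 bits.
    static void SignAndVerify(X509Certificate2 certificate)
    {
        VssSigningCredentials signer = VssSigningCredentials.Create(certificate);
        if (signer.CanSignData)
        {
            Byte[] payload = System.Text.Encoding.UTF8.GetBytes("header.claims");
            Byte[] signature = signer.SignData(payload);                    // RS256 for certificate-backed tokens
            Boolean valid = signer.VerifySignature(payload, signature);     // true for the same key material
        }
    }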
+ /// + /// The factory which creates RSACryptoServiceProvider keys used for signing and verification + /// A new VssSigningCredentials instance which uses the specified provider for signing + public static VssSigningCredentials Create(Func factory) + { + ArgumentUtility.CheckForNull(factory, nameof(factory)); + + using (var rsa = factory()) + { + if (rsa == null) + { + throw new InvalidCredentialsException(JwtResources.SignatureAlgorithmUnsupportedException("None")); + } + + if (rsa.KeySize < c_minKeySize) + { + throw new InvalidCredentialsException(JwtResources.SigningTokenKeyTooSmall()); + } + + return new RSASigningToken(factory, rsa.KeySize); + } + } +#endif + + /// + /// Creates a new VssSigningCredentials instance using the specified as the signing + /// key. The returned signing token performs symmetric key signing and verification. + /// + /// The key used for signing and verification + /// A new VssSigningCredentials instance which uses the specified key for signing + public static VssSigningCredentials Create(Byte[] key) + { + ArgumentUtility.CheckForNull(key, nameof(key)); + + // Probably should have validation here, but there was none previously + return new SymmetricKeySigningToken(key); + } + + private const Int32 c_minKeySize = 2048; + private readonly DateTime m_effectiveDate; + +#region Concrete Implementations + + private class SymmetricKeySigningToken : VssSigningCredentials + { + public SymmetricKeySigningToken(Byte[] key) + { + m_key = new Byte[key.Length]; + Buffer.BlockCopy(key, 0, m_key, 0, m_key.Length); + } + + public override Boolean CanSignData + { + get + { + return true; + } + } + + public override Int32 KeySize + { + get + { + return m_key.Length * 8; + } + } + + public override JWTAlgorithm SignatureAlgorithm + { + get + { + return JWTAlgorithm.HS256; + } + } + + protected override Byte[] GetSignature(Byte[] input) + { + using (var hash = new HMACSHA256(m_key)) + { + return hash.ComputeHash(input); + } + } + + public override Boolean VerifySignature( + Byte[] input, + Byte[] signature) + { + var computedSignature = SignData(input); + return SecureCompare.TimeInvariantEquals(computedSignature, signature); + } + + private readonly Byte[] m_key; + } + + private abstract class AsymmetricKeySigningToken : VssSigningCredentials + { + protected abstract Boolean HasPrivateKey(); + + public override JWTAlgorithm SignatureAlgorithm + { + get + { + return JWTAlgorithm.RS256; + } + } + + public override Boolean CanSignData + { + get + { + if (m_hasPrivateKey == null) + { + m_hasPrivateKey = HasPrivateKey(); + } + return m_hasPrivateKey.Value; + } + } + + private Boolean? 
m_hasPrivateKey; + } + + private class X509Certificate2SigningToken : AsymmetricKeySigningToken, IJsonWebTokenHeaderProvider + { + public X509Certificate2SigningToken(X509Certificate2 certificate) + { + m_certificate = certificate; + } + + public override Int32 KeySize + { + get + { +#if NETSTANDARD + return m_certificate.GetRSAPublicKey().KeySize; +#else + return m_certificate.PublicKey.Key.KeySize; +#endif + } + } + + public override DateTime ValidFrom + { + get + { + return m_certificate.NotBefore; + } + } + + public override DateTime ValidTo + { + get + { + return m_certificate.NotAfter; + } + } + + public override Boolean VerifySignature( + Byte[] input, + Byte[] signature) + { +#if NETSTANDARD + var rsa = m_certificate.GetRSAPublicKey(); + return rsa.VerifyData(input, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); +#else + var rsa = m_certificate.PublicKey.Key as RSACryptoServiceProvider; + + using (var hash = SHA256CryptoServiceProvider.Create()) + { + return rsa.VerifyData(input, hash, signature); + } +#endif + } + + protected override Byte[] GetSignature(Byte[] input) + { +#if NETSTANDARD + var rsa = m_certificate.GetRSAPrivateKey(); + return rsa.SignData(input, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); +#else + var rsa = m_certificate.PrivateKey as RSACryptoServiceProvider; + + using (var hash = SHA256CryptoServiceProvider.Create()) + { + return rsa.SignData(input, hash); + } +#endif + } + + protected override Boolean HasPrivateKey() + { + return m_certificate.HasPrivateKey; + } + + void IJsonWebTokenHeaderProvider.SetHeaders(IDictionary headers) + { + headers[JsonWebTokenHeaderParameters.X509CertificateThumbprint] = m_certificate.GetCertHash().ToBase64StringNoPadding(); + } + + private readonly X509Certificate2 m_certificate; + } + +#if NETSTANDARD + private class RSASigningToken : AsymmetricKeySigningToken + { + public RSASigningToken( + Func factory, + Int32 keySize) + { + m_keySize = keySize; + m_factory = factory; + } + + public override Int32 KeySize + { + get + { + return m_keySize; + } + } + + protected override Byte[] GetSignature(Byte[] input) + { + using (var rsa = m_factory()) + { + return rsa.SignData(input, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + } + } + + protected override Boolean HasPrivateKey() + { + try + { + // As unfortunate as this is, there is no way to tell from an RSA implementation, based on querying + // properties alone, if it supports signature operations or has a private key. This is a one-time + // hit for the signing credentials implementation, so it shouldn't be a huge deal. 
+ GetSignature(new Byte[1] { 1 }); + return true; + } + catch (CryptographicException) + { + return false; + } + } + + public override Boolean VerifySignature( + Byte[] input, + Byte[] signature) + { + using (var rsa = m_factory()) + { + return rsa.VerifyData(input, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + } + } + + private readonly Int32 m_keySize; + private readonly Func m_factory; + } +#else + private class RSASigningToken : AsymmetricKeySigningToken + { + public RSASigningToken( + Func factory, + Int32 keySize) + { + m_keySize = keySize; + m_factory = factory; + } + + public override Int32 KeySize + { + get + { + return m_keySize; + } + } + + protected override Byte[] GetSignature(Byte[] input) + { + using (var rsa = m_factory()) + using (var hash = new SHA256CryptoServiceProvider()) + { + return rsa.SignData(input, hash); + } + } + + protected override Boolean HasPrivateKey() + { + try + { + // As unfortunate as this is, there is no way to tell from an RSA implementation, based on querying + // properties alone, if it supports signature operations or has a private key. This is a one-time + // hit for the signing credentials implementation, so it shouldn't be a huge deal. + GetSignature(new Byte[1] { 1 }); + return true; + } + catch (CryptographicException) + { + return false; + } + } + + public override Boolean VerifySignature( + Byte[] input, + Byte[] signature) + { + using (var rsa = m_factory()) + using (var hash = new SHA256CryptoServiceProvider()) + { + return rsa.VerifyData(input, hash, signature); + } + } + + private readonly Int32 m_keySize; + private readonly Func m_factory; + } +#endif + + #endregion + } +} diff --git a/src/Sdk/WebApi/WebApi/WrappedException.cs b/src/Sdk/WebApi/WebApi/WrappedException.cs new file mode 100644 index 00000000000..a202b0ea26b --- /dev/null +++ b/src/Sdk/WebApi/WebApi/WrappedException.cs @@ -0,0 +1,621 @@ +using GitHub.Services.Common; +using Newtonsoft.Json; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Runtime.Serialization; + +namespace GitHub.Services.WebApi +{ + [DataContract(IsReference = true)] + public class WrappedException : ISecuredObject + { + public WrappedException() + { + } + + public WrappedException(Exception exception, Boolean includeErrorDetail, Version restApiVersion) + { + // if we have an AggregateException AND there is only one exception beneath it, let's use that + // exception here instead. for S2S calls, the exception coming in will be an Aggregate and will + // loose type information if only the Aggregate is returned to the caller. + if ((exception is AggregateException) && + (((AggregateException)exception).InnerExceptions != null) && + (((AggregateException)exception).Flatten().InnerExceptions.Count == 1)) + { + // take the first one + exception = ((AggregateException)exception).Flatten().InnerException; + } + + // Populate the Type, TypeName, and TypeKey properties. + Type type = exception.GetType(); + String typeName, typeKey; + + if (exception is VssServiceResponseException) + { + /* + * VssServiceResponseException takes an HttpStatusCode in its constructor which is + * not compatible with the WrappableConstructors. Its not necessary to persist the + * status code since it is bound to the response, so we just cast down to + * VssServiceException to avoid conflict when unwrapping + */ + // It is okay for VssServiceResponseExceptions to happen on the server during S2S scenarios. 
+ // just do the translation -- don't Debug.Fail! + //Debug.Fail("Do not throw VssServiceResponseException from the server side."); + type = typeof(VssServiceException); + VssException.GetTypeNameAndKeyForExceptionType(type, restApiVersion, out typeName, out typeKey); + } + else if (exception is VssServiceException) + { + ((VssServiceException)exception).GetTypeNameAndKey(restApiVersion, out typeName, out typeKey); + } + else + { + // Fall back to the base implementation + VssException.GetTypeNameAndKeyForExceptionType(type, restApiVersion, out typeName, out typeKey); + } + + this.Type = type; + this.TypeName = typeName; + this.TypeKey = typeKey; + + if (includeErrorDetail && exception.InnerException != null) + { + InnerException = new WrappedException(exception.InnerException, includeErrorDetail, restApiVersion); + } + + Message = exception.Message; + + if (includeErrorDetail) + { + //if the exception was not thrown, it won't have a stack trace, so + //capture it here in that case. Skip last two frames, we don't want WrappedException + //or its caller on the stack. + StackTrace = exception.StackTrace ?? new StackTrace(2, true).ToString(); + } + + if (!string.IsNullOrWhiteSpace(exception.HelpLink)) + { + HelpLink = exception.HelpLink; + } + + if (exception is VssException) + { + EventId = ((VssException)exception).EventId; + ErrorCode = ((VssException)exception).ErrorCode; + } + + TryWrapCustomProperties(exception); + } + + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public Dictionary CustomProperties + { + get; + set; + } + + [DataMember] + public WrappedException InnerException { get; set; } + + public Exception UnwrappedInnerException { get; set; } + + [DataMember] + public String Message { get; set; } + + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public String HelpLink { get; set; } + + public Type Type + { + get + { + if (m_type == null) + { + //try to create the type from the TypeName + if (!String.IsNullOrEmpty(TypeName)) + { + m_type = LoadType(TypeName); + } + } + + return m_type; + } + + set + { + m_type = value; + } + } + + private Type m_type; + + private string m_typeName; + + [DataMember] + public String TypeName + { + get + { + return m_typeName; + } + + set + { + if (value.Contains("Microsoft.VisualStudio")) + { + m_typeName = value.Replace("Microsoft.VisualStudio", "GitHub"); + m_typeName = m_typeName.Substring(0, m_typeName.IndexOf(",")) + ", Sdk"; + } + else if (value.Contains("Microsoft.Azure.DevOps")) + { + m_typeName = value.Replace("Microsoft.Azure.DevOps", "GitHub"); + m_typeName = m_typeName.Substring(0, m_typeName.IndexOf(",")) + ", Sdk"; + } + else if (value.Contains("Microsoft.TeamFoundation")) + { + m_typeName = value.Replace("Microsoft.TeamFoundation", "GitHub"); + m_typeName = m_typeName.Substring(0, m_typeName.IndexOf(",")) + ", Sdk"; + } + else + { + m_typeName = value; + } + } + } + + [DataMember] + public String TypeKey + { + get; + set; + } + + [DataMember] + public int ErrorCode + { + get; + set; + } + + [DataMember] + public int EventId + { + get; + set; + } + + [DataMember(EmitDefaultValue = false, IsRequired = false)] + public string StackTrace + { + get; + set; + } + + public Exception Unwrap(IDictionary typeMapping) + { + Exception innerException = null; + if (InnerException != null) + { + innerException = InnerException.Unwrap(typeMapping); + UnwrappedInnerException = innerException; + } + + Exception exception = null; + + // if they have bothered to map type, use that first. 
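To illustrate the legacy-namespace rewrite performed by the TypeName setter above, a small hypothetical example:

    var wrapped = new WrappedException();
    wrapped.TypeName = "Microsoft.VisualStudio.Services.Common.VssServiceException, "
                     + "Microsoft.VisualStudio.Services.Common, Version=14.0.0.0";
    // The setter swaps the old namespace for GitHub.* and retargets the assembly reference:
    // wrapped.TypeName is now "GitHub.Services.Common.VssServiceException, Sdk"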
+ if (!String.IsNullOrEmpty(TypeKey)) + { + Type type; + if (typeMapping != null && typeMapping.TryGetValue(TypeKey, out type) || + baseTranslatedExceptions.TryGetValue(TypeKey, out type)) + { + try + { + this.Type = type; + exception = Activator.CreateInstance(this.Type, Message, innerException) as Exception; + } + catch (Exception) + { + // do nothing + } + } + } + + if (exception == null) + { + //no standard mapping, fallback to + exception = UnWrap(innerException); + } + + if (exception is VssException) + { + ((VssException)exception).EventId = this.EventId; + ((VssException)exception).ErrorCode = this.ErrorCode; + } + + if (exception == null && !String.IsNullOrEmpty(Message)) + { + // NOTE: We can get exceptions that we can't create, IE. SqlException, AzureExceptions. + // This is not a failure, we will just wrap the exception in a VssServiceException + // since the type is not available. + exception = new VssServiceException(Message, innerException); + } + + if (exception == null && !string.IsNullOrEmpty(TypeName)) + { + Debug.Assert(false, string.Format("Server exception cannot be resolved. Type name: {0}", TypeName)); + } + + if (exception != null + && !string.IsNullOrEmpty(HelpLink)) + { + exception.HelpLink = HelpLink; + } + + if (exception != null + && !string.IsNullOrEmpty(this.StackTrace)) + { + FieldInfo stackTraceField = typeof(Exception).GetTypeInfo().GetDeclaredField("_stackTraceString"); + if (stackTraceField != null && !stackTraceField.Attributes.HasFlag(FieldAttributes.Public) && !stackTraceField.Attributes.HasFlag(FieldAttributes.Static)) + { + stackTraceField.SetValue(exception, this.StackTrace); + } + } + + if (exception != null && exception.GetType() == this.Type) + { + TryUnWrapCustomProperties(exception); + } + + return exception; + } + + private Exception UnWrap(Exception innerException) + { + Exception exception = null; + if (this.Type != null) // m_type is typically null when this.Type getter is hit from here, so the LoadType method will get invoked here. + { + try + { + Object[] args = null; + + ConstructorInfo info = GetMatchingConstructor(new[] { typeof(WrappedException) }); + if (info != null) + { + // a constructor overload on an exception that takes a WrappedException, is useful + // in cases where the other constructors manipulate the string that we pass in, + // which we do not want to happen when unwrapping an exception. + args = new object[] { this }; + } + else + { + info = GetMatchingConstructor(new[] { typeof(String), typeof(Exception) }); + if (info != null) + { + args = new object[] { Message, innerException }; + } + else + { + //try just string + info = GetMatchingConstructor(new[] { typeof(String) }); + if (info != null) + { + args = new object[] { Message }; + } + else + { + //try default constructor + info = GetMatchingConstructor(new Type[0]); + } + } + } + if (info != null) + { + exception = info.Invoke(args) as Exception; + // only check exceptions that derive from VssExceptions, since we don't have control + // to make code changes to exceptions that we don't own. + Debug.Assert(!(exception is VssException) || exception.Message == Message, + "The unwrapped exception message does not match the original exception message.", + "Type: {0}{1}Expected: {2}{1}Actual: {3}{1}{1}This can happen if the exception has a contructor that manipulates the input string. 
You can work around this by creating a constructor that takes in a WrappedException which sets the message verbatim and optionally the inner exception.", + exception.GetType(), + Environment.NewLine, + Message, + exception.Message); + } + } + catch (Exception) + { } + } + return exception; + } + + private ConstructorInfo GetMatchingConstructor(params Type[] parameterTypes) + { + return this.Type.GetConstructor(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance, null, parameterTypes, null); + } + + private static Type LoadType(String typeName) + { + // For rest api version < 3.0, the server transmits the full AssemblyQualifiedName of the exception at the time that version was initially released, + // which means normal loading will fail due to version mismatch, as the version will always be <= 14.0.0.0. + // Example: typeName=GitHub.Core.WebApi.ProjectDoesNotExistWithNameException, GitHub.Core.WebApi, Version=14.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a + + // For rest api version >= 3.0 (dev15), it just sends an assembly qualified type name without Version and PublicKeyToken, so it is version agnostic. + // Example: typeName=GitHub.Core.WebApi.ProjectDoesNotExistWithNameException, GitHub.Core.WebApi + + // Order of precedence: + // 1. Standard .NET type loading + // 2. Check exception mapping attributes for compat scenarios with older servers + // 3. If version 14 is specified in typeName, resolve assembly by switching to version 15 + // 4. Try to load the binary from the same folder as this binary + + Type ret = null; + + //try normal loading first + try + { + ret = Type.GetType(typeName, false, true); + } + catch (Exception) + { + // GetType can still throw an exception despite sending in false for throwOnError + } + + if (ret == null) + { + // try and look up type mapping based on exception attributes. + ret = LookupExceptionAttributeMapping(typeName); + if (ret == null) + { + try + { + //probably assembly version is wrong + //fortunately, .NET provides an overload for just such an eventuality + //without forcing us to parse the string + ret = Type.GetType(typeName, + ResolveAssembly, + null, + false, + true); + } + catch (Exception) + { + //we swallow all exceptions, some can potentially + //still occur, like BadImageFormat, even with throwOnError=false above + } + } + } + return ret; + } + + private static Assembly ResolveAssembly(AssemblyName asmName) + { + //if we get here we are probably in a back compat scenario + //check the version of the asmName, and if it is 14.0, upgrade it to + //the same as this assembly version and try it + if (asmName.Version == null || asmName.Version.Major <= c_backCompatVer) + { + //create new instance, don't copy unknown params... + AssemblyName newName = new AssemblyName + { + Name = asmName.Name, + CultureInfo = asmName.CultureInfo + }; + // DEVNOTE: Do not tack on the version information, instead let the + // assembly load without it so that it may resolve to the appropriate version. + // Otherwise, translation down the stack may fail due to version mismatch + // and that ends up creating unnecessary retries on certain user-defined exceptions.
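+ // Leaving Version unset below lets the runtime bind to whichever build of the assembly is actually loaded + // alongside this one, which is the intent of the DEVNOTE above.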
+ // newName.Version = Assembly.GetExecutingAssembly().GetName().Version; + newName.SetPublicKeyToken(asmName.GetPublicKeyToken()); + + try + { + var ret = Assembly.Load(newName); + if (ret != null) + { + return ret; + } + } + catch (Exception) + { } + } + + //Next, we look in the same directory, add .dll to the name and do a "LoadFrom" just + //like in the AssemblyResolve event in other places + //the assembly should be in the same directory as this one. + string currentPath = Assembly.GetExecutingAssembly().Location; + if (!String.IsNullOrEmpty(currentPath)) + { + string fullPath = Path.Combine(Path.GetDirectoryName(currentPath), asmName.Name + ".dll"); + + if (File.Exists(fullPath)) + { + return Assembly.LoadFrom(fullPath); + } + } + return null; + } + + private const int c_backCompatVer = 14; + + private static Type LookupExceptionAttributeMapping(string typeName) + { + Type mappedType = null; + Tuple<Version, Type> cacheEntry = null; + lock (syncObject) + { + if (!s_exceptionsWithAttributeMapping.TryGetValue(typeName, out cacheEntry)) + { + // if not in the cache, then we should update the cache and try again + UpdateExceptionAttributeMappingCache(); + s_exceptionsWithAttributeMapping.TryGetValue(typeName, out cacheEntry); + } + } + if (cacheEntry != null) + { + mappedType = cacheEntry.Item2; + } + return mappedType; + } + + /// <summary> + /// Loop through all types in all loaded assemblies that we haven't looked at yet, and cache ExceptionMappingAttribute data + /// </summary> + private static void UpdateExceptionAttributeMappingCache() + { + foreach (Assembly assembly in AppDomain.CurrentDomain.GetAssemblies().Where(a => !s_assembliesCheckedForExceptionMappings.Contains(a))) + { + if (DoesAssemblyQualify(assembly)) // only look at assemblies that match this binary's major version and public key token + { + try + { + + IEnumerable<Type> types; + try + { + // calling GetTypes has side effect of loading direct dependencies of the assembly. + types = assembly.GetTypes(); + } + catch (ReflectionTypeLoadException ex) + { + // if a dependent assembly fails to load, we should still be able to get all the exceptions, since it would be unlikely + // that an exception is referencing a type from the assembly that failed to load. + types = ex.Types.Where(t => t != null); + } + + foreach (TypeInfo typeInfo in types) + { + foreach (ExceptionMappingAttribute attribute in typeInfo.GetCustomAttributes<ExceptionMappingAttribute>()) + { + Tuple<Version, Type> cachedValue; + + // Check if the TypeName already exists in cache and add it if not. If it does exist, update if it has a higher ExclusiveMaxApiVersion. + // (In theory an old exception could be mapped to more than one type in the case we want the latest server + // to send different older types to different versions of older clients. This method is used only on client when converting a type + // from an older server, so we want the latest mapping of the older type.) + if (!s_exceptionsWithAttributeMapping.TryGetValue(attribute.TypeName, out cachedValue) || attribute.ExclusiveMaxApiVersion > cachedValue.Item1) + { + s_exceptionsWithAttributeMapping[attribute.TypeName] = new Tuple<Version, Type>(attribute.ExclusiveMaxApiVersion, typeInfo.AsType()); + } + } + } + } + catch (Exception) + { + // if for any reason we can't get the defined types, we don't want an exception here to mask the real exception.
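+ // Swallowing here keeps a single problematic assembly from breaking exception translation for the rest of the scan.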
+ } + } + s_assembliesCheckedForExceptionMappings.Add(assembly); // keep track of all assemblies we have either ruled out or cached mappings for, so we don't have to consider them again + } + } + + /// <summary> + /// Checks Assembly to see if it has the possibility to contain an ExceptionMappingAttribute. Does this by matching the Version and PublicKeyToken + /// with the current executing assembly. + /// </summary> + /// <param name="assembly"></param> + /// <returns></returns> + private static bool DoesAssemblyQualify(Assembly assembly) + { + if (s_currentAssemblyPublicKeyToken == null || s_currentAssemblyVersion == null) + { + // cache these so we don't have to recompute every time we check an assembly + AssemblyName thisAssemblyName = typeof(WrappedException).GetTypeInfo().Assembly.GetName(); + s_currentAssemblyPublicKeyToken = thisAssemblyName.GetPublicKeyToken(); + s_currentAssemblyVersion = thisAssemblyName.Version; + } + AssemblyName assemblyName = assembly.GetName(); + if (assemblyName.Version.Major != s_currentAssemblyVersion.Major) + { + return false; + } + byte[] assemblyPublicKeyToken = assemblyName.GetPublicKeyToken(); + + // Allow the test code public key token as well, because we have an L0 test which declares an exception + // that has ExceptionMappingAttribute. + return ArrayUtility.Equals(s_currentAssemblyPublicKeyToken, assemblyPublicKeyToken) || + ArrayUtility.Equals(s_testCodePublicKeyToken, assemblyPublicKeyToken); + } + + private static object syncObject = new Object(); + private static byte[] s_currentAssemblyPublicKeyToken = null; + private static Version s_currentAssemblyVersion = null; + private static HashSet<Assembly> s_assembliesCheckedForExceptionMappings = new HashSet<Assembly>(); + private static readonly byte[] s_testCodePublicKeyToken = new byte[] { 0x68, 0x9d, 0x5c, 0x3b, 0x19, 0xaa, 0xe6, 0x23 }; + + /// + /// Exception Attribute Mapping Cache.
key = exception type name from a response, value = ExclusiveMaxApiVersion and the mapped Type for that type name + /// + private static Dictionary> s_exceptionsWithAttributeMapping = new Dictionary>(); + + private void TryWrapCustomProperties(Exception exception) + { + var customPropertiesWithDataMemberAttribute = GetCustomPropertiesInfo(); + + if (customPropertiesWithDataMemberAttribute.Any()) + { + this.CustomProperties = new Dictionary(); + } + + foreach (var customProperty in customPropertiesWithDataMemberAttribute) + { + try + { + this.CustomProperties.Add(customProperty.Name, customProperty.GetValue(exception)); + } + catch + { + // skip this property + } + } + } + + private void TryUnWrapCustomProperties(Exception exception) + { + if (this.CustomProperties != null) + { + foreach (var property in GetCustomPropertiesInfo()) + { + if (this.CustomProperties.ContainsKey(property.Name)) + { + try + { + var propertyValue = JsonConvert.DeserializeObject(JsonConvert.SerializeObject(CustomProperties[property.Name]), property.PropertyType); + property.SetValue(exception, propertyValue); + } + catch + { + // skip this property + } + } + } + } + } + + private IEnumerable GetCustomPropertiesInfo() + { + return this.Type.GetTypeInfo().DeclaredProperties.Where(p => p.GetMethod.Attributes.HasFlag(MethodAttributes.Public) + && !p.GetMethod.Attributes.HasFlag(MethodAttributes.Static) + && p.CustomAttributes.Any(a => a.AttributeType.GetTypeInfo().IsAssignableFrom(typeof(DataMemberAttribute).GetTypeInfo()))); + } + + + #region ISecuredObject + Guid ISecuredObject.NamespaceId => throw new NotImplementedException(); + + int ISecuredObject.RequiredPermissions => throw new NotImplementedException(); + + string ISecuredObject.GetToken() => throw new NotImplementedException(); + #endregion + + // Exception translation rules which apply to all VssHttpClientBase subclasses + private static readonly IDictionary baseTranslatedExceptions = new Dictionary() + { + { "VssAccessCheckException", typeof(Security.AccessCheckException) } + }; + } +} diff --git a/src/Sdk/nuget.config b/src/Sdk/nuget.config new file mode 100644 index 00000000000..5a8826814d5 --- /dev/null +++ b/src/Sdk/nuget.config @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/src/Sync-Sdk.ps1 b/src/Sync-Sdk.ps1 new file mode 100644 index 00000000000..235b8402aa7 --- /dev/null +++ b/src/Sync-Sdk.ps1 @@ -0,0 +1,432 @@ +$ErrorActionPreference = "Stop" + +$runnerRepo = Read-Host -Prompt "actions/runner repository root" +if (!(Test-Path -LiteralPath "$runnerRepo/src")) { + Write-Error "$runnerRepo should contains a /src folder" + return 1 +} + +$gitHubSdkFolder = Join-Path -Path "$runnerRepo/src" -ChildPath "Sdk" + +$vsoRepo = $PWD +while ($true) { + if (Test-Path -LiteralPath "$vsoRepo/init.cmd") { + break; + } + else { + $vsoRepo = (Get-Item $vsoRepo).Parent.FullName + } +} + +$targetFolders = @( + # "Common" + # "WebApi" + # "AadAuthentication" + # "DTContracts" + # "DTGenerated" + # "DTLogging" + # "DTExpressions" + # "DTExpressions2" + # "DTObjectTemplating" + # "DTPipelines" + # "DTWebApi" + # "Resources" + # "BuildWebApi" + # "CoreWebApi" + # "ArtifactWebApi" + # "ArtifactContentTelemetry" + # "ArtifactContent" + # "BlobStoreWebApi" + # "BlobStoreCommonTelemetry" + # "BlobStoreCommon" +) + +$sourceFolders = @{ + # "Vssf\Client\Common" = "Common"; + # "Vssf\Client\WebApi" = "WebApi"; + # "DistributedTask\Shared\Common\Contracts" = "DTContracts"; + # "DistributedTask\Client\WebApi\Generated" = "DTGenerated"; + # "DistributedTask\Client\WebApi\Logging" = 
"DTLogging"; + # "DistributedTask\Client\WebApi\Expressions" = "DTExpressions"; + # "Actions\Runtime\Client\WebApi\Expressions2" = "DTExpressions2"; + # "Actions\Runtime\Client\WebApi\ObjectTemplating" = "DTObjectTemplating"; + # "Actions\Runtime\Client\WebApi\Pipelines" = "DTPipelines"; + # "DistributedTask\Client\WebApi\WebApi" = "DTWebApi"; + # "..\obj\Debug.AnyCPU\Vssf.Client\MS.VS.Services.Common\EmbeddedVersionInfo.cs" = "Common\EmbeddedVersionInfo.cs"; + # "Vssf\InteractiveClient\Client\Authentication\VssAadToken.cs" = "AadAuthentication"; + # "Vssf\InteractiveClient\Client\Authentication\VssAadTokenProvider.cs" = "AadAuthentication"; + # "Vssf\InteractiveClient\Client\Authentication\VssAadCredential.cs" = "AadAuthentication"; + # "Vssf\InteractiveClient\Client\VssAadSettings.cs" = "AadAuthentication"; + # "Vssf\InteractiveClient\Client\Authentication\VssFederatedCredential.cs" = "AadAuthentication"; + # "Vssf\InteractiveClient\Client\Authentication\VssFederatedToken.cs" = "AadAuthentication"; + # "Vssf\InteractiveClient\Client\Authentication\VssFederatedTokenProvider.cs" = "AadAuthentication"; + # "Vssf\InteractiveClient\Client\Authentication\Utility\CookieUtility.cs" = "AadAuthentication"; + # "Actions\Runtime\Client\WebApi\Pipelines\ObjectTemplating\workflow-v1.0.json" = "DTPipelines"; + # "Tfs\Client\Build2\Api" = "BuildWebApi"; + # "Tfs\Client\Core" = "CoreWebApi"; + # "ArtifactServices\Client\WebApi" = "ArtifactWebApi"; + # "ArtifactServices\Shared\Content.Common.Telemetry" = "ArtifactContentTelemetry"; + # "ArtifactServices\Shared\Content.Common" = "ArtifactContent"; + # "BlobStore\Client\WebApi" = "BlobStoreWebApi"; + # "ArtifactServices\Shared\BlobStore.Common.Telemetry" = "BlobStoreCommonTelemetry"; + # "ArtifactServices\Shared\BlobStore.Common" = "BlobStoreCommon"; +} + +$extraFiles = @( + # "BlobStoreCommon\BlobStore.Common\AzureStorageOperationTraceAdapter.cs" + # "BlobStoreCommon\BlobStore.Common\BlobIdentifierHelperExtensions.cs" + # "BlobStoreCommon\BlobStore.Common\BlobIdentifierHexConverter.cs" + # "BlobStoreCommon\BlobStore.Common\EdgeCacheUrlBuilder.cs" + # "BlobStoreCommon\BlobStore.Common\Exceptions.cs" + # "BlobStoreCommon\BlobStore.Common\IDownloader.cs" + # "BlobStoreCommon\BlobStore.Common\InternalsVisibleTo.cs" + # "BlobStoreCommon\BlobStore.Common\IUrlSigner.cs" + # "BlobStoreCommon\BlobStore.Common\ManagedParallelBlobDownloader.cs" + # "BlobStoreCommon\BlobStore.Common\NullableExtensions.cs" + # "BlobStoreCommon\BlobStore.Common\ObjectExtensions.cs" + # "BlobStoreCommon\BlobStore.Common\PerfCounters\InstrumentationManifest.cs" + # "BlobStoreCommon\BlobStore.Common\PerfCounters\InstrumentationManifestException.cs" + # "BlobStoreCommon\BlobStore.Common\PerfCounters\IPerformanceDataFacade.cs" + # "BlobStoreCommon\BlobStore.Common\PerfCounters\ManifestCounters.cs" + # "BlobStoreCommon\BlobStore.Common\PerfCounters\NoopPerfCounter.cs" + # "BlobStoreCommon\BlobStore.Common\PerfCounters\NoopPerformanceDataFacade.cs" + # "BlobStoreCommon\BlobStore.Common\PerfCounters\PerfCounter.cs" + # "BlobStoreCommon\BlobStore.Common\PerfCounters\PerfCounterSet.cs" + # "BlobStoreCommon\BlobStore.Common\PerfCounters\PerformanceDataFacade.cs" + # "BlobStoreCommon\BlobStore.Common\ReceiptSecretConstants.cs" + # "BlobStoreCommon\BlobStore.Common\SecurityDefinitions.cs" + # "BlobStoreCommon\BlobStore.Common\VsoHashAlgorithm.cs" + # "BlobStoreCommonTelemetry\BlobStore.Common.Telemetry\InternalsVisibleTo.cs" + # 
"ArtifactContentTelemetry\Content.Common.Telemetry\InternalsVisibleTo.cs" + # "ArtifactContentTelemetry\Content.Common.Telemetry\Telemetry\NoopClientTelemetry.cs" + # "ArtifactContentTelemetry\Content.Common.Telemetry\Telemetry\TelemetryPlatformSpecificNetFramework.cs" + # "ArtifactContentTelemetry\Content.Common.Telemetry\Telemetry\TelemetryEnvironmentHelper.cs" + # "ArtifactContent\Content.Common\AsyncEnumerator\AsyncEnumeratorExceptionWrapper.cs" + # "ArtifactContent\Content.Common\AsyncEnumerator\IEnumeratorExceptionMapper.cs" + # "ArtifactContent\Content.Common\AsyncEnumerator\AsyncEnumeratorWithCursor.cs" + # "ArtifactContent\Content.Common\Authentication\AadAcquireTokenException.cs" + # "ArtifactContent\Content.Common\Authentication\AadErrorHandlingPolicy.cs" + # "ArtifactContent\Content.Common\Authentication\CredentialProvider\CredentialProviderException.cs" + # "ArtifactContent\Content.Common\Authentication\CredentialProvider\CredentialProviderLoader.cs" + # "ArtifactContent\Content.Common\Authentication\CredentialProvider\CredentialProviderManager.cs" + # "ArtifactContent\Content.Common\Authentication\CredentialProvider\CredentialResponse.cs" + # "ArtifactContent\Content.Common\Authentication\CredentialProvider\ICredentialProvider.cs" + # "ArtifactContent\Content.Common\Authentication\CredentialProvider\PluginCredentialProvider.cs" + # "ArtifactContent\Content.Common\Authentication\LocalTokenCacheArgs.cs" + # "ArtifactContent\Content.Common\Authentication\TestableAuthenticationContext.cs" + # "ArtifactContent\Content.Common\Authentication\VsoAadConstants.cs" + # "ArtifactContent\Content.Common\Authentication\VsoCredentialHelper.cs" + # "ArtifactContent\Content.Common\AutoKillProcessHandle.cs" + # "ArtifactContent\Content.Common\ConcurrencyConsolidator.cs" + # "ArtifactContent\Content.Common\EnumUtilities.cs" + # "ArtifactContent\Content.Common\EquatableTuple.cs" + # "ArtifactContent\Content.Common\FileVersionHelpers.cs" + # "ArtifactContent\Content.Common\Histogram.cs" + # "ArtifactContent\Content.Common\InternalsVisibleTo.cs" + # "ArtifactContent\Content.Common\InUseDetection.cs" + # "ArtifactContent\Content.Common\IteratorPartition.cs" + # "ArtifactContent\Content.Common\Json\ByteArrayAsBase64JsonConvertor.cs" + # "ArtifactContent\Content.Common\Json\ByteArrayAsHexJsonConvertor.cs" + # "ArtifactContent\Content.Common\Json\JsonEnumerator.cs" + # "ArtifactContent\Content.Common\Json\JsonNormalizer.cs" + # "ArtifactContent\Content.Common\Json\JsonProperty.cs" + # "ArtifactContent\Content.Common\Json\JsonStream.cs" + # "ArtifactContent\Content.Common\Json\JsonWrite.cs" + # "ArtifactContent\Content.Common\Json\JsonWriterStream.cs" + # "ArtifactContent\Content.Common\Json\JsonWrites.cs" + # "ArtifactContent\Content.Common\Json\ULongJsonConverter.cs" + # "ArtifactContent\Content.Common\Kvp.cs" + # "ArtifactContent\Content.Common\Operations\SecureStringConverter.cs" + # "ArtifactContent\Content.Common\PagedEnumerator.cs" + # "ArtifactContent\Content.Common\PerformanceInfo.cs" + # "ArtifactContent\Content.Common\ReaderWriterLockSlimExtensions.cs" + # "ArtifactContent\Content.Common\ReadOnlySet.cs" + # "ArtifactContent\Content.Common\RetrievalOptions.cs" + # "ArtifactContent\Content.Common\ServiceInstanceTypes.cs" + # "ArtifactContent\Content.Common\ServicePointHelper.cs" + # "ArtifactContent\Content.Common\ShardableLocator.cs" + # "ArtifactContent\Content.Common\StringExtensions.cs" + # "ArtifactContent\Content.Common\ThreadLocalRandom.cs" + # 
"ArtifactContent\Content.Common\ThreadPoolHelper.cs" + # "ArtifactContent\Content.Common\Tracing\AppTraceListener.cs" + # "ArtifactContent\Content.Common\Tracing\ArtifactServicesTraceSource.cs" + # "ArtifactContent\Content.Common\Tracing\ConsoleMessageUtil.cs" + # "ArtifactContent\Content.Common\Tracing\ConsoleTraceListener.cs" + # "ArtifactContent\Content.Common\Tracing\FileTraceListener.cs" + # "ArtifactContent\Content.Common\Tracing\InMemoryLog.cs" + # "ArtifactContent\Content.Common\Tracing\InMemoryTraceListener.cs" + # "Common\Common\CommandLine\Argument.cs" + # "Common\Common\CommandLine\AttributeBasedOperationModeHandlerFactory.cs" + # "Common\Common\CommandLine\AttributeBasedOptionParserAdapter.cs" + # "Common\Common\CommandLine\BasicParser.cs" + # "Common\Common\CommandLine\CommandLineLexer.cs" + # "Common\Common\CommandLine\Enumerations.cs" + # "Common\Common\CommandLine\Exceptions.cs" + # "Common\Common\CommandLine\Extensions.cs" + # "Common\Common\CommandLine\IEnumerable.cs" + # "Common\Common\CommandLine\OperationHandler.cs" + # "Common\Common\CommandLine\OperationHandlerFactory.cs" + # "Common\Common\CommandLine\OperationModeAttribute.cs" + # "Common\Common\CommandLine\Option.cs" + # "Common\Common\CommandLine\OptionAttribute.cs" + # "Common\Common\CommandLine\OptionParser.cs" + # "Common\Common\CommandLine\OptionReader.cs" + # "Common\Common\CommandLine\ResponseFileOptionReader.cs" + # "Common\Common\CommandLine\Validation\DefaultValidation.cs" + # "Common\Common\CommandLine\Validation\IOptionValidation.cs" + # "Common\Common\CommandLine\Validation\OptionExistsFilter.cs" + # "Common\Common\CommandLine\Validation\OptionMustExist.cs" + # "Common\Common\CommandLine\Validation\OptionRequiresSpecificValue.cs" + # "Common\Common\CommandLine\Validation\OptionsAreMutuallyExclusive.cs" + # "Common\Common\CommandLine\Validation\OptionsAreMutuallyInclusive.cs" + # "Common\Common\CommandLine\Validation\OptionValidation.cs" + # "Common\Common\CommandLine\Validation\OptionValidationFilter.cs" + # "Common\Common\CommandLine\Validation\OptionValueFilter.cs" + # "Common\Common\CommandLine\ValueConverters\CsvCollectionConverter.cs" + # "Common\Common\CommandLine\ValueConverters\EnumConverter.cs" + # "Common\Common\CommandLine\ValueConverters\IValueConvertible.cs" + # "Common\Common\CommandLine\ValueConverters\UriConverter.cs" + # "Common\Common\CommandLine\ValueConverters\ValueConverter.cs" + # "Common\Common\ExternalProviders\IExternalProviderHttpRequester.cs" + # "Common\Common\Performance\PerformanceNativeMethods.cs" + # "Common\Common\TokenStorage\RegistryToken.cs" + # "Common\Common\TokenStorage\RegistryTokenStorage.cs" + # "Common\Common\TokenStorage\RegistryTokenStorageHelper.cs" + # "Common\Common\TokenStorage\VssTokenStorageFactory.cs" + # "Common\Common\Utility\CredentialsCacheManager.cs" + # "Common\Common\Utility\EncryptionUtility.cs" + # "Common\Common\Utility\EnumerableUtility.cs" + # "Common\Common\Utility\EnvironmentWrapper.cs" + # "Common\Common\Utility\ExceptionExtentions.cs" + # "Common\Common\Utility\NativeMethods.cs" + # "Common\Common\Utility\OSDetails.cs" + # "Common\Common\Utility\DateTimeUtility.cs" + # "Common\Common\Utility\PasswordUtility.cs" + # "Common\Common\Utility\RegistryHelper.cs" + # "Common\Common\Utility\SerializationHelper.cs" + # "Common\Common\Utility\Csv\CsvException.cs" + # "Common\Common\Utility\Csv\CsvConfiguration.cs" + # "Common\Common\Utility\Csv\CsvWriter.cs" + # "Common\Common\VssEnvironment.cs" + # "WebApi\WebApi\AssemblyAttributes.cs" + # 
"WebApi\WebApi\Contracts\DelegatedAuthorization\ExpiringToken.cs" + # "WebApi\WebApi\Contracts\ExternalEvent\Comparers\ExternalGitIssueComparer.cs" + # "WebApi\WebApi\Contracts\ExternalEvent\ExternalGitExtensions.cs" + # "WebApi\WebApi\Contracts\ExternalEvent\Comparers\ExternalGitPullRequestComparer.cs" + # "WebApi\WebApi\Contracts\ExternalEvent\Comparers\ExternalGitCommitComparer.cs" + # "WebApi\WebApi\Contracts\ExternalEvent\ExternalGitIssueEvent.cs" + # "WebApi\WebApi\Contracts\ExternalEvent\Comparers\ExternalGitRepoComparer.cs" + # "WebApi\WebApi\Contracts\ExternalEvent\ExternalGitCommitCommentEvent.cs" + # "WebApi\WebApi\Contracts\PermissionLevel\Client\PagedPermissionLevelAssignment.cs" + # "WebApi\WebApi\Contracts\PermissionLevel\Client\PermissionLevelAssignment.cs" + # "WebApi\WebApi\Contracts\PermissionLevel\Enumerations.cs" + # "WebApi\WebApi\Contracts\PermissionLevel\Client\PermissionLevelDefinition.cs" + # "WebApi\WebApi\Contracts\Tokens\PATAddedEvent.cs" + # "WebApi\WebApi\Contracts\Tokens\SshKeyAddedEvent.cs" + # "WebApi\WebApi\Contracts\Tokens\ExpiringTokenEvent.cs" + # "WebApi\WebApi\Contracts\DelegatedAuthorization\PATAddedEvent.cs" + # "WebApi\WebApi\Contracts\DelegatedAuthorization\SshKeyAddedEvent.cs" + # "WebApi\WebApi\Contracts\DelegatedAuthorization\ExpiringTokenEvent.cs" + # "WebApi\WebApi\Contracts\DelegatedAuthorization\Migration\DelegatedAuthMigrationStatus.cs" + # "WebApi\WebApi\Contracts\DelegatedAuthorization\Migration\DelegatedAuthorizationMigrationBase.cs" + # "WebApi\WebApi\Contracts\DelegatedAuthorization\Migration\TokenDelegatedAuthorizationAccessKeyPublicDataMigration.cs" + # "WebApi\WebApi\Contracts\DelegatedAuthorization\Migration\TokenDelegatedAuthorizationAccessMigration.cs" + # "WebApi\WebApi\Contracts\DelegatedAuthorization\Migration\TokenDelegatedAuthorizationMigration.cs" + # "WebApi\WebApi\Contracts\DelegatedAuthorization\Migration\TokenDelegatedAuthorizationAccessKeyMigration.cs" + # "WebApi\WebApi\Contracts\DelegatedAuthorization\Migration\TokenDelegatedAuthorizationRegistrationMigration.cs" + # "WebApi\WebApi\Contracts\DelegatedAuthorization\Migration\TokenDelegatedAuthorizationRegistrationRedirectLocationMigration.cs" + # "WebApi\WebApi\Contracts\DelegatedAuthorization\Migration\TokenDelegatedHostAuthorizationMigration.cs" + # "WebApi\WebApi\Contracts\OAuthWhitelist\OAuthWhitelistEntry.cs" + # "WebApi\WebApi\Contracts\TokenAdmin\PatRevokedEvent.cs" + # "WebApi\WebApi\Contracts\TokenAdmin\TokenAdministrationRevocation.cs" + # "WebApi\WebApi\Contracts\TokenAdmin\TokenAdminPagedSessionTokens.cs" + # "WebApi\WebApi\Contracts\TokenAdmin\TokenAdminRevocation.cs" + # "WebApi\WebApi\Contracts\TokenAdmin\TokenAdminRevocationRule.cs" + # "WebApi\WebApi\Exceptions\AuditLogExceptions.cs" + # "WebApi\WebApi\Exceptions\AadExceptions.cs" + # "WebApi\WebApi\Exceptions\PermissionLevelExceptions.cs" + # "WebApi\WebApi\HttpClients\CsmResourceProviderHttpClient.cs" + # "WebApi\WebApi\HttpClients\Generated\CsmResourceProviderHttpClientBase.cs" + # "WebApi\WebApi\HttpClients\Generated\OAuthWhitelistHttpClient.cs" + # "WebApi\WebApi\HttpClients\Generated\TokenAdminHttpClient.cs" + # "WebApi\WebApi\HttpClients\Generated\TokenAdministrationHttpClient.cs" + # "WebApi\WebApi\HttpClients\Generated\TokenExpirationHttpClient.cs" + # "WebApi\WebApi\HttpClients\Generated\TokenMigrationHttpClient.cs" + # "WebApi\WebApi\HttpClients\Generated\PermissionLevelHttpClient.cs" + # "WebApi\WebApi\HttpClients\CommerceHostHelperHttpClient.cs" + # 
"WebApi\WebApi\Utilities\DelegatedAuthComparers.cs" + # "WebApi\WebApi\Utilities\HttpHeadersExtensions.cs" + # "WebApi\WebApi\VssClientCertificateManager.cs" + # "WebApi\WebApi\VssClientEnvironment.cs" + # "WebApi\WebApi\VssSoapMediaTypeFormatter.cs" +) + +$resourceFiles = @{ + # "ExpressionResources" = "DistributedTask\Client\WebApi\Expressions\ExpressionResources.resx"; + # "PipelineStrings" = "DistributedTask\Client\WebApi\Pipelines\PipelineStrings.resx"; + # "CommonResources" = "Vssf\Client\Common\Resources.resx"; + # "IdentityResources" = "Vssf\Client\WebApi\Resources\IdentityResources.resx"; + # "JwtResources" = "Vssf\Client\WebApi\Resources\JwtResources.resx"; + # "WebApiResources" = "Vssf\Client\WebApi\Resources\WebApiResources.resx"; + # "DataImportResources" = "Vssf\Client\WebApi\Resources\DataImportResources.resx"; + # "PatchResources" = "Vssf\Client\WebApi\Resources\PatchResources.resx"; + # "AccountResources" = "Vssf\Client\WebApi\Resources\AccountResources.resx"; + # "TemplateStrings" = "DistributedTask\Client\WebApi\ObjectTemplating\TemplateStrings.resx"; + # "GraphResources" = "Vssf\Client\WebApi\Resources\GraphResources.resx"; + # "FileContainerResources" = "Vssf\Client\WebApi\Resources\FileContainerResources.resx"; + # "LocationResources" = "Vssf\Client\WebApi\Resources\LocationResources.resx"; + # "CommerceResources" = "Vssf\Client\WebApi\Resources\CommerceResources.resx"; + # "SecurityResources" = "Vssf\Client\WebApi\Resources\SecurityResources.resx"; + # "WebPlatformResources" = "Vssf\Client\WebApi\Resources\WebPlatformResources.resx"; + # "ZeusWebApiResources" = "Vssf\Client\WebApi\Resources\ZeusWebApiResources.resx"; + # "NameResolutionResources" = "Vssf\Client\WebApi\Resources\NameResolutionResources.resx"; + # "PartitioningResources" = "Vssf\Client\WebApi\Resources\PartitioningResources.resx"; + # "WebApiResources" = "Tfs\Client\Core\Resources\WebApiResources.resx"; + # "BlobStoreResources" = "BlobStore\Client\WebApi\Resources.resx" + # "ContentResources" = "ArtifactServices\Shared\Content.Common\Resources.resx" + # "BlobStoreCommonResources" = "ArtifactServices\Shared\BlobStore.Common\Resources.resx" +} + +$resourceNamespace = @{ + # "ExpressionResources" = "Microsoft.TeamFoundation.DistributedTask.Expressions"; + # "PipelineStrings" = "Microsoft.TeamFoundation.DistributedTask.Pipelines"; + # "CommonResources" = "Microsoft.VisualStudio.Services.Common.Internal"; + # "IdentityResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "JwtResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "WebApiResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "DataImportResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "PatchResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "AccountResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "TemplateStrings" = "Microsoft.TeamFoundation.DistributedTask.ObjectTemplating"; + # "GraphResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "FileContainerResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "LocationResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "CommerceResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "SecurityResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "WebPlatformResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "ZeusWebApiResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "NameResolutionResources" = "Microsoft.VisualStudio.Services.WebApi"; + # "PartitioningResources" = "Microsoft.VisualStudio.Services.WebApi"; + # 
"WebApiResources" = "Microsoft.TeamFoundation.Core.WebApi"; + # "ContentResources" = "Microsoft.VisualStudio.Services.Content.Common"; + # "BlobStoreCommonResources" = "Microsoft.VisualStudio.Services.BlobStore.Common"; + # "BlobStoreResources" = "Microsoft.VisualStudio.Services.BlobStore.WebApi"; +} + +foreach ($folder in $targetFolders) { + Write-Host "Recreate $gitHubSdkFolder\$folder" + + if (Test-Path -LiteralPath "$gitHubSdkFolder\$folder") { + Remove-Item -LiteralPath "$gitHubSdkFolder\$folder" -Force -Recurse + } + New-Item -Path $gitHubSdkFolder -Name $folder -ItemType "directory" -Force +} + +foreach ($sourceFolder in $sourceFolders.Keys) { + $copySource = Join-Path -Path $vsoRepo -ChildPath $sourceFolder + $copyDest = Join-Path -Path $gitHubSdkFolder -ChildPath $sourceFolders[$sourceFolder] + + Write-Host "Copy $copySource to $copyDest" + + Copy-Item -Path $copySource -Destination $copyDest -Filter "*.cs" -Recurse -Force +} + +Write-Host "Delete extra none NetStandard files" +foreach ($extraFile in $extraFiles) { + Remove-Item -LiteralPath "$gitHubSdkFolder\$extraFile" -Force +} + +Write-Host "Generate C# file for resx files" +foreach ($resourceFile in $resourceFiles.Keys) { + Write-Host "Generate file for $resourceFile" + $stringBuilder = New-Object System.Text.StringBuilder + $file = $resourceFiles[$resourceFile] + $xml = [xml](Get-Content -LiteralPath "$vsoRepo\$file") + $null = $stringBuilder.AppendLine('using System.Globalization;') + $null = $stringBuilder.AppendLine('') + $namespace = $resourceNamespace[$resourceFile] + $null = $stringBuilder.AppendLine("namespace $namespace") + $null = $stringBuilder.AppendLine('{') + $null = $stringBuilder.AppendLine(" public static class $resourceFile") + $null = $stringBuilder.AppendLine(' {') + foreach ($data in $xml.root.data) { + $i = 0 + $args = "" + $inputs = "" + while ($true) { + if ($data.value.Contains("{$i}") -or $data.value.Contains("{$i" + ":")) { + if ($i -eq 0) { + $args = "object arg$i" + $inputs = "arg$i" + } + else { + $args = $args + ", " + "object arg$i" + $inputs = $inputs + ", " + "arg$i" + } + $i++ + } + else { + break + } + } + + $null = $stringBuilder.AppendLine("") + $null = $stringBuilder.AppendLine(" public static string $($data.name)($($args))") + $null = $stringBuilder.AppendLine(" {") + $null = $stringBuilder.AppendLine(@" + const string Format = @"$($data.value.Replace('"', '""'))"; +"@) + if ($i -eq 0) { + $null = $stringBuilder.AppendLine(" return Format;") + } + else { + $null = $stringBuilder.AppendLine(" return string.Format(CultureInfo.CurrentCulture, Format, $inputs);") + } + $null = $stringBuilder.AppendLine(" }") + } + + $null = $stringBuilder.AppendLine(" }") + $null = $stringBuilder.AppendLine("}") + + # Write Resources.g.cs. 
+ $genResourceFile = Join-Path -Path $gitHubSdkFolder -ChildPath "Resources\$resourceFile.g.cs" + [System.IO.File]::WriteAllText($genResourceFile, ($stringBuilder.ToString()), ([System.Text.Encoding]::UTF8)) +} + +# Print out all namespaces +Write-Host "Rename namespaces:" +$namespaces = New-Object 'System.Collections.Generic.HashSet[string]' +$sourceFiles = Get-ChildItem -LiteralPath $gitHubSdkFolder -Filter "*.cs" -Recurse -Force -File +foreach ($file in $sourceFiles) { + foreach ($line in Get-Content $file.FullName) { + if ($line.StartsWith("namespace ")) { + $namespace = $line.Substring("namespace ".Length) + if ($namespaces.Add($namespace)) { + Write-Host $namespace + } + } + } +} + +# Rename all namespaces to GitHub +$allSourceFiles = Get-ChildItem -LiteralPath $gitHubSdkFolder -Filter "*.cs" -Recurse -Force -File +foreach ($file in $allSourceFiles) { + $stringBuilder = New-Object System.Text.StringBuilder + foreach ($line in Get-Content $file.FullName) { + if ($line.Contains("Microsoft.VisualStudio")) { + $line = $line.Replace("Microsoft.VisualStudio", "GitHub"); + } + elseif ($line.Contains("Microsoft.Azure.DevOps")) { + $line = $line.Replace("Microsoft.Azure.DevOps", "GitHub"); + } + elseif ($line.Contains("Microsoft.TeamFoundation")) { + $line = $line.Replace("Microsoft.TeamFoundation", "GitHub"); + } + + $null = $stringBuilder.AppendLine($line) + } + + [System.IO.File]::WriteAllText($file.FullName, ($stringBuilder.ToString()), ([System.Text.Encoding]::UTF8)) +} + +Write-Host "Done" \ No newline at end of file diff --git a/src/Test/L0/CommandLineParserL0.cs b/src/Test/L0/CommandLineParserL0.cs new file mode 100644 index 00000000000..d83aef58d69 --- /dev/null +++ b/src/Test/L0/CommandLineParserL0.cs @@ -0,0 +1,127 @@ +using Moq; +using System.Runtime.CompilerServices; +using Xunit; + +namespace GitHub.Runner.Common.Tests +{ + public sealed class CommandLineParserL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void CanConstruct() + { + using (TestHostContext hc = CreateTestContext()) + { + Tracing trace = hc.GetTrace(); + + CommandLineParser clp = new CommandLineParser(hc, secretArgNames: new string[0]); + trace.Info("Constructed"); + + Assert.NotNull(clp); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void MasksSecretArgs() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + CommandLineParser clp = new CommandLineParser( + hc, + secretArgNames: new[] { "SecretArg1", "SecretArg2" }); + + // Assert. + clp.Parse(new string[] + { + "cmd", + "--secretarg1", + "secret value 1", + "--publicarg", + "public arg value", + "--secretarg2", + "secret value 2", + }); + + // Assert. 
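+ // Parse is expected to register the values of args listed in secretArgNames with the host's SecretMasker, + // which is why masking those values below yields "***".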
+ Assert.Equal(hc.SecretMasker.MaskSecrets("secret value 1"), "***"); + Assert.Equal(hc.SecretMasker.MaskSecrets("secret value 2"), "***"); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void ParsesCommands() + { + using (TestHostContext hc = CreateTestContext()) + { + Tracing trace = hc.GetTrace(); + + CommandLineParser clp = new CommandLineParser(hc, secretArgNames: new string[0]); + trace.Info("Constructed."); + + clp.Parse(new string[] { "cmd1", "cmd2", "--arg1", "arg1val", "badcmd" }); + trace.Info("Parsed"); + + trace.Info("Commands: {0}", clp.Commands.Count); + Assert.True(clp.Commands.Count == 2); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void ParsesArgs() + { + using (TestHostContext hc = CreateTestContext()) + { + Tracing trace = hc.GetTrace(); + + CommandLineParser clp = new CommandLineParser(hc, secretArgNames: new string[0]); + trace.Info("Constructed."); + + clp.Parse(new string[] { "cmd1", "--arg1", "arg1val", "--arg2", "arg2val" }); + trace.Info("Parsed"); + + trace.Info("Args: {0}", clp.Args.Count); + Assert.True(clp.Args.Count == 2); + Assert.True(clp.Args.ContainsKey("arg1")); + Assert.Equal(clp.Args["arg1"], "arg1val"); + Assert.True(clp.Args.ContainsKey("arg2")); + Assert.Equal(clp.Args["arg2"], "arg2val"); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void ParsesFlags() + { + using (TestHostContext hc = CreateTestContext()) + { + Tracing trace = hc.GetTrace(); + + CommandLineParser clp = new CommandLineParser(hc, secretArgNames: new string[0]); + trace.Info("Constructed."); + + clp.Parse(new string[] { "cmd1", "--flag1", "--arg1", "arg1val", "--flag2" }); + trace.Info("Parsed"); + + trace.Info("Args: {0}", clp.Flags.Count); + Assert.True(clp.Flags.Count == 2); + Assert.True(clp.Flags.Contains("flag1")); + Assert.True(clp.Flags.Contains("flag2")); + } + } + + private TestHostContext CreateTestContext([CallerMemberName] string testName = "") + { + TestHostContext hc = new TestHostContext(this, testName); + return hc; + } + } +} diff --git a/src/Test/L0/ConstantGenerationL0.cs b/src/Test/L0/ConstantGenerationL0.cs new file mode 100644 index 00000000000..7e9fcb0d31c --- /dev/null +++ b/src/Test/L0/ConstantGenerationL0.cs @@ -0,0 +1,29 @@ +using System.Collections.Generic; +using GitHub.Runner.Sdk; +using Xunit; + + +namespace GitHub.Runner.Common.Tests +{ + public sealed class ConstantGenerationL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public void BuildConstantGenerateSucceed() + { + List validPackageNames = new List() + { + "win-x64", + "win-x86", + "linux-x64", + "linux-arm", + "rhel.6-x64", + "osx-x64" + }; + + Assert.True(BuildConstants.Source.CommitHash.Length == 40, $"CommitHash should be SHA-1 hash {BuildConstants.Source.CommitHash}"); + Assert.True(validPackageNames.Contains(BuildConstants.RunnerPackage.PackageName), $"PackageName should be one of the following '{string.Join(", ", validPackageNames)}', current PackageName is '{BuildConstants.RunnerPackage.PackageName}'"); + } + } +} diff --git a/src/Test/L0/Container/ContainerInfoL0.cs b/src/Test/L0/Container/ContainerInfoL0.cs new file mode 100644 index 00000000000..5873652ad1d --- /dev/null +++ b/src/Test/L0/Container/ContainerInfoL0.cs @@ -0,0 +1,38 @@ +using System; +using GitHub.Runner.Worker.Container; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Worker.Container +{ + public sealed class ContainerInfoL0 + { + [Fact] + [Trait("Level", "L0")] + 
[Trait("Category", "Worker")] + public void MountVolumeConstructorParsesStringInput() + { + // Arrange + MountVolume target = new MountVolume("/dst/dir"); // Maps anonymous Docker volume into target dir + MountVolume source_target = new MountVolume("/src/dir:/dst/dir"); // Maps source to target dir + MountVolume target_ro = new MountVolume("/dst/dir:ro"); + MountVolume source_target_ro = new MountVolume("/src/dir:/dst/dir:ro"); + + // Assert + Assert.Null(target.SourceVolumePath); + Assert.Equal("/dst/dir", target.TargetVolumePath); + Assert.False(target.ReadOnly); + + Assert.Equal("/src/dir", source_target.SourceVolumePath); + Assert.Equal("/dst/dir", source_target.TargetVolumePath); + Assert.False(source_target.ReadOnly); + + Assert.Null(target_ro.SourceVolumePath); + Assert.Equal("/dst/dir", target_ro.TargetVolumePath); + Assert.True(target_ro.ReadOnly); + + Assert.Equal("/src/dir", source_target_ro.SourceVolumePath); + Assert.Equal("/dst/dir", source_target_ro.TargetVolumePath); + Assert.True(source_target_ro.ReadOnly); + } + } +} diff --git a/src/Test/L0/Container/DockerUtilL0.cs b/src/Test/L0/Container/DockerUtilL0.cs new file mode 100644 index 00000000000..3d3d43ca8c7 --- /dev/null +++ b/src/Test/L0/Container/DockerUtilL0.cs @@ -0,0 +1,130 @@ +using System; +using System.Collections.Generic; +using GitHub.Runner.Worker.Container; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Worker.Container +{ + public sealed class DockerUtilL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void RegexParsesDockerPort() + { + // Arrange + var dockerPortOutput0 = new List(); + var dockerPortOutput1 = new List + { + "80/tcp -> 0.0.0.0:32881" + }; + var dockerPortOutput1Empty = new List + { + "" + }; + var dockerPortOutput2 = new List + { + "80/tcp -> 0.0.0.0:32881", + "6379/tcp -> 0.0.0.0:32882" + }; + + // Act + var result0 = DockerUtil.ParseDockerPort(dockerPortOutput0); + var result1 = DockerUtil.ParseDockerPort(dockerPortOutput1); + var result1Empty = DockerUtil.ParseDockerPort(dockerPortOutput1Empty); + var result2 = DockerUtil.ParseDockerPort(dockerPortOutput2); + + // Assert + Assert.NotNull(result0); + Assert.Equal(result0.Count, 0); + + Assert.NotNull(result1); + Assert.Equal(result1.Count, 1); + var result1Port80Mapping = result1.Find(pm => + string.Equals(pm.ContainerPort, "80") && + string.Equals(pm.HostPort, "32881") && + string.Equals(pm.Protocol, "tcp", StringComparison.OrdinalIgnoreCase) + ); + Assert.NotNull(result1Port80Mapping); + + Assert.NotNull(result1Empty); + Assert.Equal(result1Empty.Count, 0); + + Assert.NotNull(result2); + Assert.Equal(result2.Count, 2); + var result2Port80Mapping = result2.Find(pm => + string.Equals(pm.ContainerPort, "80") && + string.Equals(pm.HostPort, "32881") && + string.Equals(pm.Protocol, "tcp", StringComparison.OrdinalIgnoreCase) + ); + Assert.NotNull(result2Port80Mapping); + var result2Port6379Mapping = result2.Find(pm => + string.Equals(pm.ContainerPort, "6379") && + string.Equals(pm.HostPort, "32882") && + string.Equals(pm.Protocol, "tcp", StringComparison.OrdinalIgnoreCase) + ); + Assert.NotNull(result2Port6379Mapping); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void RegexParsesPathFromDockerConfigEnv() + { + // Arrange + var configOutput0 = new List + { + "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", + "MY_VAR=test" + }; + var configOutput1 = new List + { + "PATH=/bad 
idea:/really,bad,idea:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", + "MY_VAR=test" + }; + var configOutput2 = new List(); + var configOutput3 = new List + { + "NOT_A_PATH=/bad idea:/really,bad,idea:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", + "MY_VAR=test" + }; + var configOutput4 = new List + { + "PATH", + "PATH=" + }; + var configOutput5 = new List + { + "PATH=/foo/bar:/baz", + "Path=/no/where" + }; + + // Act + var result0 = DockerUtil.ParsePathFromConfigEnv(configOutput0); + var result1 = DockerUtil.ParsePathFromConfigEnv(configOutput1); + var result2 = DockerUtil.ParsePathFromConfigEnv(configOutput2); + var result3 = DockerUtil.ParsePathFromConfigEnv(configOutput3); + var result4 = DockerUtil.ParsePathFromConfigEnv(configOutput4); + var result5 = DockerUtil.ParsePathFromConfigEnv(configOutput5); + + // Assert + Assert.NotNull(result0); + Assert.Equal("/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", result0); + + Assert.NotNull(result1); + Assert.Equal("/bad idea:/really,bad,idea:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", result1); + + Assert.NotNull(result2); + Assert.Equal("", result2); + + Assert.NotNull(result3); + Assert.Equal("", result3); + + Assert.NotNull(result4); + Assert.Equal("", result4); + + Assert.NotNull(result5); + Assert.Equal("/foo/bar:/baz", result5); + } + } +} diff --git a/src/Test/L0/DotnetsdkDownloadScriptL0.cs b/src/Test/L0/DotnetsdkDownloadScriptL0.cs new file mode 100644 index 00000000000..25646c14792 --- /dev/null +++ b/src/Test/L0/DotnetsdkDownloadScriptL0.cs @@ -0,0 +1,58 @@ +using Xunit; +using System.IO; +using System.Net.Http; +using System.Threading.Tasks; + +namespace GitHub.Runner.Common.Tests +{ + public sealed class DotnetsdkDownloadScriptL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async Task EnsureDotnetsdkBashDownloadScriptUpToDate() + { + string shDownloadUrl = "https://dot.net/v1/dotnet-install.sh"; + + using (HttpClient downloadClient = new HttpClient()) + { + var response = await downloadClient.GetAsync("https://www.bing.com"); + if (!response.IsSuccessStatusCode) + { + return; + } + + string shScript = await downloadClient.GetStringAsync(shDownloadUrl); + + string existingShScript = File.ReadAllText(Path.Combine(TestUtil.GetSrcPath(), "Misc/dotnet-install.sh")); + + bool shScriptMatched = string.Equals(shScript.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"), existingShScript.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n")); + Assert.True(shScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.sh with content from https://dot.net/v1/dotnet-install.sh"); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async Task EnsureDotnetsdkPowershellDownloadScriptUpToDate() + { + string ps1DownloadUrl = "https://dot.net/v1/dotnet-install.ps1"; + + using (HttpClient downloadClient = new HttpClient()) + { + var response = await downloadClient.GetAsync("https://www.bing.com"); + if (!response.IsSuccessStatusCode) + { + return; + } + + string ps1Script = await downloadClient.GetStringAsync(ps1DownloadUrl); + + string existingPs1Script = File.ReadAllText(Path.Combine(TestUtil.GetSrcPath(), "Misc/dotnet-install.ps1")); + + bool ps1ScriptMatched = string.Equals(ps1Script.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"), existingPs1Script.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n")); + Assert.True(ps1ScriptMatched, "Fix the 
test by updating Src/Misc/dotnet-install.ps1 with content from https://dot.net/v1/dotnet-install.ps1"); + } + } + } +} diff --git a/src/Test/L0/ExtensionManagerL0.cs b/src/Test/L0/ExtensionManagerL0.cs new file mode 100644 index 00000000000..46b69c750ef --- /dev/null +++ b/src/Test/L0/ExtensionManagerL0.cs @@ -0,0 +1,62 @@ +using GitHub.Runner.Common.Capabilities; +using GitHub.Runner.Worker; +using System; +using System.Collections.Generic; +using System.Linq; +using Xunit; + +namespace GitHub.Runner.Common.Tests +{ + public sealed class ExtensionManagerL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void LoadsTypeFromString() + { + using (TestHostContext tc = new TestHostContext(this)) + { + // Arrange. + var manager = new ExtensionManager(); + manager.Initialize(tc); + + // Act. + List extensions = manager.GetExtensions(); + + // Assert. + Assert.True( + extensions.Any(x => x is RunnerCapabilitiesProvider), + $"Expected {nameof(RunnerCapabilitiesProvider)} extension to be returned as a job extension."); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void LoadsTypes() + { + using (TestHostContext tc = new TestHostContext(this)) + { + // Arrange. + var manager = new ExtensionManager(); + manager.Initialize(tc); + + // Act/Assert. + AssertContains( + manager, + concreteType: typeof(GitHub.Runner.Common.Capabilities.RunnerCapabilitiesProvider)); + } + } + + private static void AssertContains(ExtensionManager manager, Type concreteType) where T : class, IExtension + { + // Act. + List extensions = manager.GetExtensions(); + + // Assert. + Assert.True( + extensions.Any(x => x.GetType() == concreteType), + $"Expected '{typeof(T).FullName}' extensions to contain concrete type '{concreteType.FullName}'."); + } + } +} diff --git a/src/Test/L0/HostContextL0.cs b/src/Test/L0/HostContextL0.cs new file mode 100644 index 00000000000..40841a30165 --- /dev/null +++ b/src/Test/L0/HostContextL0.cs @@ -0,0 +1,84 @@ +using GitHub.Runner.Common.Util; +using System.IO; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Threading; +using Xunit; + +namespace GitHub.Runner.Common.Tests +{ + public sealed class HostContextL0 + { + private HostContext _hc; + private CancellationTokenSource _tokenSource; + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void CreateServiceReturnsNewInstance() + { + try + { + // Arrange. + Setup(); + + // Act. + var reference1 = _hc.CreateService(); + var reference2 = _hc.CreateService(); + + // Assert. + Assert.NotNull(reference1); + Assert.IsType(reference1); + Assert.NotNull(reference2); + Assert.IsType(reference2); + Assert.False(object.ReferenceEquals(reference1, reference2)); + } + finally + { + // Cleanup. + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void GetServiceReturnsSingleton() + { + try + { + // Arrange. + Setup(); + + // Act. + var reference1 = _hc.GetService(); + var reference2 = _hc.GetService(); + + // Assert. + Assert.NotNull(reference1); + Assert.IsType(reference1); + Assert.NotNull(reference2); + Assert.True(object.ReferenceEquals(reference1, reference2)); + } + finally + { + // Cleanup. 
+ Teardown(); + } + } + + public void Setup([CallerMemberName] string testName = "") + { + _tokenSource = new CancellationTokenSource(); + _hc = new HostContext( + hostType: "L0Test", + logFile: Path.Combine(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), $"trace_{nameof(HostContextL0)}_{testName}.log")); + } + + public void Teardown() + { + _hc?.Dispose(); + _tokenSource?.Dispose(); + } + } +} diff --git a/src/Test/L0/Listener/AgentL0.cs b/src/Test/L0/Listener/AgentL0.cs new file mode 100644 index 00000000000..b5ffa7b6f4d --- /dev/null +++ b/src/Test/L0/Listener/AgentL0.cs @@ -0,0 +1,574 @@ +// using GitHub.DistributedTask.WebApi; +// using GitHub.Runner.Listener; +// using GitHub.Runner.Listener.Configuration; +// using Moq; +// using System; +// using System.Collections.Generic; +// using System.Threading; +// using System.Threading.Tasks; +// using Xunit; +// using GitHub.Services.WebApi; +// using Pipelines = GitHub.DistributedTask.Pipelines; +// using GitHub.Runner.Common.Util; + +// namespace GitHub.Runner.Common.Tests.Listener +// { +// public sealed class AgentL0 +// { +// private Mock _configurationManager; +// private Mock _jobNotification; +// private Mock _messageListener; +// private Mock _promptManager; +// private Mock _jobDispatcher; +// private Mock _agentServer; +// private Mock _term; +// private Mock _configStore; +// private Mock _proxy; +// private Mock _cert; +// private Mock _updater; + +// public AgentL0() +// { +// _configurationManager = new Mock(); +// _jobNotification = new Mock(); +// _messageListener = new Mock(); +// _promptManager = new Mock(); +// _jobDispatcher = new Mock(); +// _agentServer = new Mock(); +// _term = new Mock(); +// _configStore = new Mock(); +// _proxy = new Mock(); +// _cert = new Mock(); +// _updater = new Mock(); +// } + +// private AgentJobRequestMessage CreateJobRequestMessage(string jobName) +// { +// TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference(); +// TimelineReference timeline = null; +// JobEnvironment environment = new JobEnvironment(); +// List tasks = new List(); +// Guid JobId = Guid.NewGuid(); +// var jobRequest = new AgentJobRequestMessage(plan, timeline, JobId, jobName, jobName, environment, tasks); +// return jobRequest as AgentJobRequestMessage; +// } + +// private JobCancelMessage CreateJobCancelMessage() +// { +// var message = new JobCancelMessage(Guid.NewGuid(), TimeSpan.FromSeconds(0)); +// return message; +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Agent")] +// //process 2 new job messages, and one cancel message +// public async void TestRunAsync() +// { +// using (var hc = new TestHostContext(this)) +// { +// //Arrange +// var agent = new Runner.Listener.Runner(); +// hc.SetSingleton(_configurationManager.Object); +// hc.SetSingleton(_jobNotification.Object); +// hc.SetSingleton(_messageListener.Object); +// hc.SetSingleton(_promptManager.Object); +// hc.SetSingleton(_agentServer.Object); +// hc.SetSingleton(_proxy.Object); +// hc.SetSingleton(_cert.Object); +// hc.SetSingleton(_configStore.Object); +// agent.Initialize(hc); +// var settings = new RunnerSettings +// { +// PoolId = 43242 +// }; + +// var message = new TaskAgentMessage() +// { +// Body = JsonUtility.ToString(CreateJobRequestMessage("job1")), +// MessageId = 4234, +// MessageType = JobRequestMessageTypes.AgentJobRequest +// }; + +// var messages = new Queue(); +// messages.Enqueue(message); +// var signalWorkerComplete = new SemaphoreSlim(0, 1); +// _configurationManager.Setup(x => 
x.LoadSettings()) +// .Returns(settings); +// _configurationManager.Setup(x => x.IsConfigured()) +// .Returns(true); +// _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) +// .Returns(Task.FromResult(true)); +// _messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny())) +// .Returns(async () => +// { +// if (0 == messages.Count) +// { +// signalWorkerComplete.Release(); +// await Task.Delay(2000, hc.RunnerShutdownToken); +// } + +// return messages.Dequeue(); +// }); +// _messageListener.Setup(x => x.DeleteSessionAsync()) +// .Returns(Task.CompletedTask); +// _messageListener.Setup(x => x.DeleteMessageAsync(It.IsAny())) +// .Returns(Task.CompletedTask); +// _jobDispatcher.Setup(x => x.Run(It.IsAny(), It.IsAny())) +// .Callback(() => +// { + +// }); +// _jobNotification.Setup(x => x.StartClient(It.IsAny(), It.IsAny(), It.IsAny())) +// .Callback(() => +// { + +// }); +// _jobNotification.Setup(x => x.StartClient(It.IsAny(), It.IsAny())) +// .Callback(() => +// { + +// }); + +// hc.EnqueueInstance(_jobDispatcher.Object); + +// _configStore.Setup(x => x.IsServiceConfigured()).Returns(false); +// //Act +// var command = new CommandSettings(hc, new string[] { "run" }); +// Task agentTask = agent.ExecuteCommand(command); + +// //Assert +// //wait for the agent to run one job +// if (!await signalWorkerComplete.WaitAsync(2000)) +// { +// Assert.True(false, $"{nameof(_messageListener.Object.GetNextMessageAsync)} was not invoked."); +// } +// else +// { +// //Act +// hc.ShutdownRunner(ShutdownReason.UserCancelled); //stop Agent + +// //Assert +// Task[] taskToWait2 = { agentTask, Task.Delay(2000) }; +// //wait for the Agent to exit +// await Task.WhenAny(taskToWait2); + +// Assert.True(agentTask.IsCompleted, $"{nameof(agent.ExecuteCommand)} timed out."); +// Assert.True(!agentTask.IsFaulted, agentTask.Exception?.ToString()); +// Assert.True(agentTask.IsCanceled); + +// _jobDispatcher.Verify(x => x.Run(It.IsAny(), It.IsAny()), Times.Once(), +// $"{nameof(_jobDispatcher.Object.Run)} was not invoked."); +// _messageListener.Verify(x => x.GetNextMessageAsync(It.IsAny()), Times.AtLeastOnce()); +// _messageListener.Verify(x => x.CreateSessionAsync(It.IsAny()), Times.Once()); +// _messageListener.Verify(x => x.DeleteSessionAsync(), Times.Once()); +// _messageListener.Verify(x => x.DeleteMessageAsync(It.IsAny()), Times.AtLeastOnce()); +// } +// } +// } + +// public static TheoryData RunAsServiceTestData = new TheoryData() +// { +// // staring with run command, configured as run as service, should start the agent +// { new [] { "run" }, true, Times.Once() }, +// // starting with no argument, configured not to run as service, should start agent interactively +// { new [] { "run" }, false, Times.Once() } +// }; +// [Theory] +// [MemberData("RunAsServiceTestData")] +// [Trait("Level", "L0")] +// [Trait("Category", "Agent")] +// public async void TestExecuteCommandForRunAsService(string[] args, bool configureAsService, Times expectedTimes) +// { +// using (var hc = new TestHostContext(this)) +// { +// hc.SetSingleton(_configurationManager.Object); +// hc.SetSingleton(_promptManager.Object); +// hc.SetSingleton(_messageListener.Object); +// hc.SetSingleton(_proxy.Object); +// hc.SetSingleton(_cert.Object); +// hc.SetSingleton(_configStore.Object); + +// var command = new CommandSettings(hc, args); + +// _configurationManager.Setup(x => x.IsConfigured()).Returns(true); +// _configurationManager.Setup(x => x.LoadSettings()) +// .Returns(new RunnerSettings { }); + +// _configStore.Setup(x => 
x.IsServiceConfigured()).Returns(configureAsService); + +// _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) +// .Returns(Task.FromResult(false)); + +// var agent = new Runner.Listener.Runner(); +// agent.Initialize(hc); +// await agent.ExecuteCommand(command); + +// _messageListener.Verify(x => x.CreateSessionAsync(It.IsAny()), expectedTimes); +// } +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Agent")] +// //process 2 new job messages, and one cancel message +// public async void TestMachineProvisionerCLI() +// { +// using (var hc = new TestHostContext(this)) +// { +// hc.SetSingleton(_configurationManager.Object); +// hc.SetSingleton(_promptManager.Object); +// hc.SetSingleton(_messageListener.Object); +// hc.SetSingleton(_proxy.Object); +// hc.SetSingleton(_cert.Object); +// hc.SetSingleton(_configStore.Object); + +// var command = new CommandSettings(hc, new[] { "run" }); + +// _configurationManager.Setup(x => x.IsConfigured()). +// Returns(true); +// _configurationManager.Setup(x => x.LoadSettings()) +// .Returns(new RunnerSettings { }); + +// _configStore.Setup(x => x.IsServiceConfigured()) +// .Returns(false); + +// _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) +// .Returns(Task.FromResult(false)); + +// var agent = new Runner.Listener.Runner(); +// agent.Initialize(hc); +// await agent.ExecuteCommand(command); + +// _messageListener.Verify(x => x.CreateSessionAsync(It.IsAny()), Times.Once()); +// } +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Agent")] +// //process 2 new job messages, and one cancel message +// public async void TestMachineProvisionerCLICompat() +// { +// using (var hc = new TestHostContext(this)) +// { +// hc.SetSingleton(_configurationManager.Object); +// hc.SetSingleton(_promptManager.Object); +// hc.SetSingleton(_messageListener.Object); +// hc.SetSingleton(_proxy.Object); +// hc.SetSingleton(_cert.Object); +// hc.SetSingleton(_configStore.Object); + +// var command = new CommandSettings(hc, new string[] { }); + +// _configurationManager.Setup(x => x.IsConfigured()). 
+// Returns(true); +// _configurationManager.Setup(x => x.LoadSettings()) +// .Returns(new RunnerSettings { }); + +// _configStore.Setup(x => x.IsServiceConfigured()) +// .Returns(false); + +// _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) +// .Returns(Task.FromResult(false)); + +// var agent = new Runner.Listener.Runner(); +// agent.Initialize(hc); +// await agent.ExecuteCommand(command); + +// _messageListener.Verify(x => x.CreateSessionAsync(It.IsAny()), Times.Once()); +// } +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Agent")] +// public async void TestRunOnce() +// { +// using (var hc = new TestHostContext(this)) +// { +// //Arrange +// var agent = new Runner.Listener.Runner(); +// hc.SetSingleton(_configurationManager.Object); +// hc.SetSingleton(_jobNotification.Object); +// hc.SetSingleton(_messageListener.Object); +// hc.SetSingleton(_promptManager.Object); +// hc.SetSingleton(_agentServer.Object); +// hc.SetSingleton(_proxy.Object); +// hc.SetSingleton(_cert.Object); +// hc.SetSingleton(_configStore.Object); +// agent.Initialize(hc); +// var settings = new RunnerSettings +// { +// PoolId = 43242 +// }; + +// var message = new TaskAgentMessage() +// { +// Body = JsonUtility.ToString(CreateJobRequestMessage("job1")), +// MessageId = 4234, +// MessageType = JobRequestMessageTypes.AgentJobRequest +// }; + +// var messages = new Queue(); +// messages.Enqueue(message); +// _configurationManager.Setup(x => x.LoadSettings()) +// .Returns(settings); +// _configurationManager.Setup(x => x.IsConfigured()) +// .Returns(true); +// _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) +// .Returns(Task.FromResult(true)); +// _messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny())) +// .Returns(async () => +// { +// if (0 == messages.Count) +// { +// await Task.Delay(2000); +// } + +// return messages.Dequeue(); +// }); +// _messageListener.Setup(x => x.DeleteSessionAsync()) +// .Returns(Task.CompletedTask); +// _messageListener.Setup(x => x.DeleteMessageAsync(It.IsAny())) +// .Returns(Task.CompletedTask); + +// var runOnceJobCompleted = new TaskCompletionSource(); +// _jobDispatcher.Setup(x => x.RunOnceJobCompleted) +// .Returns(runOnceJobCompleted); +// _jobDispatcher.Setup(x => x.Run(It.IsAny(), It.IsAny())) +// .Callback(() => +// { +// runOnceJobCompleted.TrySetResult(true); +// }); +// _jobNotification.Setup(x => x.StartClient(It.IsAny(), It.IsAny(), It.IsAny())) +// .Callback(() => +// { + +// }); +// _jobNotification.Setup(x => x.StartClient(It.IsAny(), It.IsAny())) +// .Callback(() => +// { + +// }); + +// hc.EnqueueInstance(_jobDispatcher.Object); + +// _configStore.Setup(x => x.IsServiceConfigured()).Returns(false); +// //Act +// var command = new CommandSettings(hc, new string[] { "run", "--once" }); +// Task agentTask = agent.ExecuteCommand(command); + +// //Assert +// //wait for the agent to run one job and exit +// await Task.WhenAny(agentTask, Task.Delay(30000)); + +// Assert.True(agentTask.IsCompleted, $"{nameof(agent.ExecuteCommand)} timed out."); +// Assert.True(!agentTask.IsFaulted, agentTask.Exception?.ToString()); +// Assert.True(agentTask.Result == Constants.Runner.ReturnCode.Success); + +// _jobDispatcher.Verify(x => x.Run(It.IsAny(), true), Times.Once(), +// $"{nameof(_jobDispatcher.Object.Run)} was not invoked."); +// _messageListener.Verify(x => x.GetNextMessageAsync(It.IsAny()), Times.AtLeastOnce()); +// _messageListener.Verify(x => x.CreateSessionAsync(It.IsAny()), Times.Once()); +// _messageListener.Verify(x => 
x.DeleteSessionAsync(), Times.Once()); +// _messageListener.Verify(x => x.DeleteMessageAsync(It.IsAny()), Times.AtLeastOnce()); +// } +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Agent")] +// public async void TestRunOnceOnlyTakeOneJobMessage() +// { +// using (var hc = new TestHostContext(this)) +// { +// //Arrange +// var agent = new Runner.Listener.Runner(); +// hc.SetSingleton(_configurationManager.Object); +// hc.SetSingleton(_jobNotification.Object); +// hc.SetSingleton(_messageListener.Object); +// hc.SetSingleton(_promptManager.Object); +// hc.SetSingleton(_agentServer.Object); +// hc.SetSingleton(_proxy.Object); +// hc.SetSingleton(_cert.Object); +// hc.SetSingleton(_configStore.Object); +// agent.Initialize(hc); +// var settings = new RunnerSettings +// { +// PoolId = 43242 +// }; + +// var message1 = new TaskAgentMessage() +// { +// Body = JsonUtility.ToString(CreateJobRequestMessage("job1")), +// MessageId = 4234, +// MessageType = JobRequestMessageTypes.AgentJobRequest +// }; +// var message2 = new TaskAgentMessage() +// { +// Body = JsonUtility.ToString(CreateJobRequestMessage("job1")), +// MessageId = 4235, +// MessageType = JobRequestMessageTypes.AgentJobRequest +// }; + +// var messages = new Queue(); +// messages.Enqueue(message1); +// messages.Enqueue(message2); +// _configurationManager.Setup(x => x.LoadSettings()) +// .Returns(settings); +// _configurationManager.Setup(x => x.IsConfigured()) +// .Returns(true); +// _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) +// .Returns(Task.FromResult(true)); +// _messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny())) +// .Returns(async () => +// { +// if (0 == messages.Count) +// { +// await Task.Delay(2000); +// } + +// return messages.Dequeue(); +// }); +// _messageListener.Setup(x => x.DeleteSessionAsync()) +// .Returns(Task.CompletedTask); +// _messageListener.Setup(x => x.DeleteMessageAsync(It.IsAny())) +// .Returns(Task.CompletedTask); + +// var runOnceJobCompleted = new TaskCompletionSource(); +// _jobDispatcher.Setup(x => x.RunOnceJobCompleted) +// .Returns(runOnceJobCompleted); +// _jobDispatcher.Setup(x => x.Run(It.IsAny(), It.IsAny())) +// .Callback(() => +// { +// runOnceJobCompleted.TrySetResult(true); +// }); +// _jobNotification.Setup(x => x.StartClient(It.IsAny(), It.IsAny(), It.IsAny())) +// .Callback(() => +// { + +// }); +// _jobNotification.Setup(x => x.StartClient(It.IsAny(), It.IsAny())) +// .Callback(() => +// { + +// }); + +// hc.EnqueueInstance(_jobDispatcher.Object); + +// _configStore.Setup(x => x.IsServiceConfigured()).Returns(false); +// //Act +// var command = new CommandSettings(hc, new string[] { "run", "--once" }); +// Task agentTask = agent.ExecuteCommand(command); + +// //Assert +// //wait for the agent to run one job and exit +// await Task.WhenAny(agentTask, Task.Delay(30000)); + +// Assert.True(agentTask.IsCompleted, $"{nameof(agent.ExecuteCommand)} timed out."); +// Assert.True(!agentTask.IsFaulted, agentTask.Exception?.ToString()); +// Assert.True(agentTask.Result == Constants.Runner.ReturnCode.Success); + +// _jobDispatcher.Verify(x => x.Run(It.IsAny(), true), Times.Once(), +// $"{nameof(_jobDispatcher.Object.Run)} was not invoked."); +// _messageListener.Verify(x => x.GetNextMessageAsync(It.IsAny()), Times.AtLeastOnce()); +// _messageListener.Verify(x => x.CreateSessionAsync(It.IsAny()), Times.Once()); +// _messageListener.Verify(x => x.DeleteSessionAsync(), Times.Once()); +// _messageListener.Verify(x => x.DeleteMessageAsync(It.IsAny()), 
Times.Once()); +// } +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Agent")] +// public async void TestRunOnceHandleUpdateMessage() +// { +// using (var hc = new TestHostContext(this)) +// { +// //Arrange +// var agent = new Runner.Listener.Runner(); +// hc.SetSingleton(_configurationManager.Object); +// hc.SetSingleton(_jobNotification.Object); +// hc.SetSingleton(_messageListener.Object); +// hc.SetSingleton(_promptManager.Object); +// hc.SetSingleton(_agentServer.Object); +// hc.SetSingleton(_proxy.Object); +// hc.SetSingleton(_cert.Object); +// hc.SetSingleton(_configStore.Object); +// hc.SetSingleton(_updater.Object); + +// agent.Initialize(hc); +// var settings = new RunnerSettings +// { +// PoolId = 43242, +// AgentId = 5678 +// }; + +// var message1 = new TaskAgentMessage() +// { +// Body = JsonUtility.ToString(new AgentRefreshMessage(settings.AgentId, "2.123.0")), +// MessageId = 4234, +// MessageType = AgentRefreshMessage.MessageType +// }; + +// var messages = new Queue(); +// messages.Enqueue(message1); +// _updater.Setup(x => x.SelfUpdate(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) +// .Returns(Task.FromResult(true)); +// _configurationManager.Setup(x => x.LoadSettings()) +// .Returns(settings); +// _configurationManager.Setup(x => x.IsConfigured()) +// .Returns(true); +// _messageListener.Setup(x => x.CreateSessionAsync(It.IsAny())) +// .Returns(Task.FromResult(true)); +// _messageListener.Setup(x => x.GetNextMessageAsync(It.IsAny())) +// .Returns(async () => +// { +// if (0 == messages.Count) +// { +// await Task.Delay(2000); +// } + +// return messages.Dequeue(); +// }); +// _messageListener.Setup(x => x.DeleteSessionAsync()) +// .Returns(Task.CompletedTask); +// _messageListener.Setup(x => x.DeleteMessageAsync(It.IsAny())) +// .Returns(Task.CompletedTask); +// _jobNotification.Setup(x => x.StartClient(It.IsAny(), It.IsAny(), It.IsAny())) +// .Callback(() => +// { + +// }); +// _jobNotification.Setup(x => x.StartClient(It.IsAny(), It.IsAny())) +// .Callback(() => +// { + +// }); + +// hc.EnqueueInstance(_jobDispatcher.Object); + +// _configStore.Setup(x => x.IsServiceConfigured()).Returns(false); +// //Act +// var command = new CommandSettings(hc, new string[] { "run", "--once" }); +// Task agentTask = agent.ExecuteCommand(command); + +// //Assert +// //wait for the agent to exit with right return code +// await Task.WhenAny(agentTask, Task.Delay(30000)); + +// Assert.True(agentTask.IsCompleted, $"{nameof(agent.ExecuteCommand)} timed out."); +// Assert.True(!agentTask.IsFaulted, agentTask.Exception?.ToString()); +// Assert.True(agentTask.Result == Constants.Runner.ReturnCode.RunOnceRunnerUpdating); + +// _updater.Verify(x => x.SelfUpdate(It.IsAny(), It.IsAny(), false, It.IsAny()), Times.Once); +// _jobDispatcher.Verify(x => x.Run(It.IsAny(), true), Times.Never()); +// _messageListener.Verify(x => x.GetNextMessageAsync(It.IsAny()), Times.AtLeastOnce()); +// _messageListener.Verify(x => x.CreateSessionAsync(It.IsAny()), Times.Once()); +// _messageListener.Verify(x => x.DeleteSessionAsync(), Times.Once()); +// _messageListener.Verify(x => x.DeleteMessageAsync(It.IsAny()), Times.Once()); +// } +// } +// } +// } diff --git a/src/Test/L0/Listener/CommandSettingsL0.cs b/src/Test/L0/Listener/CommandSettingsL0.cs new file mode 100644 index 00000000000..289c1e80811 --- /dev/null +++ b/src/Test/L0/Listener/CommandSettingsL0.cs @@ -0,0 +1,784 @@ +using GitHub.Runner.Listener; +using GitHub.Runner.Listener.Configuration; +using GitHub.Runner.Common.Util; 
+using Moq; +using System; +using System.Runtime.CompilerServices; +using Xunit; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common.Tests +{ + public sealed class CommandSettingsL0 + { + private readonly Mock _promptManager = new Mock(); + + // It is sufficient to test one arg only. All individual args are tested by the PromptsFor___ methods. + // The PromptsFor___ methods suffice to cover the interesting differences between each of the args. + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsArg() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "--agent", "some agent" }); + + // Act. + string actual = command.GetAgentName(); + + // Assert. + Assert.Equal("some agent", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsArgFromEnvVar() + { + using (TestHostContext hc = CreateTestContext()) + { + try + { + // Arrange. + Environment.SetEnvironmentVariable("ACTIONS_RUNNER_INPUT_AGENT", "some agent"); + var command = new CommandSettings(hc, args: new string[0]); + + // Act. + string actual = command.GetAgentName(); + + // Assert. + Assert.Equal("some agent", actual); + Assert.Equal(string.Empty, Environment.GetEnvironmentVariable("ACTIONS_RUNNER_INPUT_AGENT") ?? string.Empty); // Should remove. + Assert.Equal(hc.SecretMasker.MaskSecrets("some agent"), "some agent"); + } + finally + { + Environment.SetEnvironmentVariable("ACTIONS_RUNNER_INPUT_AGENT", null); + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsArgSecretFromEnvVar() + { + using (TestHostContext hc = CreateTestContext()) + { + try + { + // Arrange. + Environment.SetEnvironmentVariable("ACTIONS_RUNNER_INPUT_TOKEN", "some secret token value"); + var command = new CommandSettings(hc, args: new string[0]); + + // Act. + string actual = command.GetToken(); + + // Assert. + Assert.Equal("some secret token value", actual); + Assert.Equal(string.Empty, Environment.GetEnvironmentVariable("ACTIONS_RUNNER_INPUT_TOKEN") ?? string.Empty); // Should remove. + Assert.Equal(hc.SecretMasker.MaskSecrets("some secret token value"), "***"); + } + finally + { + Environment.SetEnvironmentVariable("ACTIONS_RUNNER_INPUT_TOKEN", null); + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsCommandConfigure() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "configure" }); + + // Act. + bool actual = command.Configure; + + // Assert. + Assert.True(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsCommandRun() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "run" }); + + // Act. + bool actual = command.Run; + + // Assert. + Assert.True(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsCommandUnconfigure() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "remove" }); + + // Act. + bool actual = command.Remove; + + // Assert. 
+ Assert.True(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsFlagCommit() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "--commit" }); + + // Act. + bool actual = command.Commit; + + // Assert. + Assert.True(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsFlagHelp() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "--help" }); + + // Act. + bool actual = command.Help; + + // Assert. + Assert.True(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsFlagReplace() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "--replace" }); + + // Act. + bool actual = command.GetReplace(); + + // Assert. + Assert.True(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsFlagRunAsService() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "--runasservice" }); + + // Act. + bool actual = command.GetRunAsService(); + + // Assert. + Assert.True(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsFlagUnattended() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "--unattended" }); + + // Act. + bool actual = command.Unattended; + + // Assert. + Assert.True(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsFlagUnattendedFromEnvVar() + { + using (TestHostContext hc = CreateTestContext()) + { + try + { + // Arrange. + Environment.SetEnvironmentVariable("ACTIONS_RUNNER_INPUT_UNATTENDED", "true"); + var command = new CommandSettings(hc, args: new string[0]); + + // Act. + bool actual = command.Unattended; + + // Assert. + Assert.Equal(true, actual); + Assert.Equal(string.Empty, Environment.GetEnvironmentVariable("ACTIONS_RUNNER_INPUT_UNATTENDED") ?? string.Empty); // Should remove. + } + finally + { + Environment.SetEnvironmentVariable("ACTIONS_RUNNER_INPUT_UNATTENDED", null); + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void GetsFlagVersion() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "--version" }); + + // Act. + bool actual = command.Version; + + // Assert. + Assert.True(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PassesUnattendedToReadBool() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "--unattended" }); + _promptManager + .Setup(x => x.ReadBool( + Constants.Runner.CommandLine.Flags.Replace, // argName + "Would you like to replace the existing runner? (Y/N)", // description + false, // defaultValue + true)) // unattended + .Returns(true); + + // Act. + bool actual = command.GetReplace(); + + // Assert. 
+ Assert.True(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PassesUnattendedToReadValue() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "--unattended" }); + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.Agent, // argName + "Enter the name of runner:", // description + false, // secret + Environment.MachineName, // defaultValue + Validators.NonEmptyValidator, // validator + true)) // unattended + .Returns("some agent"); + + // Act. + string actual = command.GetAgentName(); + + // Assert. + Assert.Equal("some agent", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsForAgent() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[0]); + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.Agent, // argName + "Enter the name of runner:", // description + false, // secret + Environment.MachineName, // defaultValue + Validators.NonEmptyValidator, // validator + false)) // unattended + .Returns("some agent"); + + // Act. + string actual = command.GetAgentName(); + + // Assert. + Assert.Equal("some agent", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsForAuth() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[0]); + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.Auth, // argName + "How would you like to authenticate?", // description + false, // secret + "some default auth", // defaultValue + Validators.AuthSchemeValidator, // validator + false)) // unattended + .Returns("some auth"); + + // Act. + string actual = command.GetAuth("some default auth"); + + // Assert. + Assert.Equal("some auth", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsForPassword() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[0]); + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.Password, // argName + "What is your GitHub password?", // description + true, // secret + string.Empty, // defaultValue + Validators.NonEmptyValidator, // validator + false)) // unattended + .Returns("some password"); + + // Act. + string actual = command.GetPassword(); + + // Assert. + Assert.Equal("some password", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsForPool() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[0]); + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.Pool, // argName + "Enter the name of your runner pool:", // description + false, // secret + "default", // defaultValue + Validators.NonEmptyValidator, // validator + false)) // unattended + .Returns("some pool"); + + // Act. + string actual = command.GetPool(); + + // Assert. 
+ Assert.Equal("some pool", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsForReplace() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[0]); + _promptManager + .Setup(x => x.ReadBool( + Constants.Runner.CommandLine.Flags.Replace, // argName + "Would you like to replace the existing runner? (Y/N)", // description + false, // defaultValue + false)) // unattended + .Returns(true); + + // Act. + bool actual = command.GetReplace(); + + // Assert. + Assert.True(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsForRunAsService() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[0]); + _promptManager + .Setup(x => x.ReadBool( + Constants.Runner.CommandLine.Flags.RunAsService, // argName + "Would you like to run the runner as service? (Y/N)", // description + false, // defaultValue + false)) // unattended + .Returns(true); + + // Act. + bool actual = command.GetRunAsService(); + + // Assert. + Assert.True(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsForToken() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[0]); + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.Token, // argName + "Enter your personal access token:", // description + true, // secret + string.Empty, // defaultValue + Validators.NonEmptyValidator, // validator + false)) // unattended + .Returns("some token"); + + // Act. + string actual = command.GetToken(); + + // Assert. + Assert.Equal("some token", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsForUrl() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[0]); + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.Url, // argName + "What is the URL of your repository?", // description + false, // secret + string.Empty, // defaultValue + Validators.ServerUrlValidator, // validator + false)) // unattended + .Returns("some url"); + + // Act. + string actual = command.GetUrl(); + + // Assert. + Assert.Equal("some url", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsForUserName() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[0]); + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.UserName, // argName + "What is your GitHub username?", // description + false, // secret + string.Empty, // defaultValue + Validators.NonEmptyValidator, // validator + false)) // unattended + .Returns("some user name"); + + // Act. + string actual = command.GetUserName(); + + // Assert. + Assert.Equal("some user name", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsForWindowsLogonAccount() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. 
+ var command = new CommandSettings(hc, args: new string[0]); + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.WindowsLogonAccount, // argName + "User account to use for the service", // description + false, // secret + "some default account", // defaultValue + Validators.NTAccountValidator, // validator + false)) // unattended + .Returns("some windows logon account"); + + // Act. + string actual = command.GetWindowsLogonAccount("some default account", "User account to use for the service"); + + // Assert. + Assert.Equal("some windows logon account", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsForWindowsLogonPassword() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[0]); + string accountName = "somewindowsaccount"; + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.WindowsLogonPassword, // argName + string.Format("Password for the account {0}", accountName), // description + true, // secret + string.Empty, // defaultValue + Validators.NonEmptyValidator, // validator + false)) // unattended + .Returns("some windows logon password"); + + // Act. + string actual = command.GetWindowsLogonPassword(accountName); + + // Assert. + Assert.Equal("some windows logon password", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsForWork() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[0]); + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.Work, // argName + "Enter name of work folder:", // description + false, // secret + "_work", // defaultValue + Validators.NonEmptyValidator, // validator + false)) // unattended + .Returns("some work"); + + // Act. + string actual = command.GetWork(); + + // Assert. + Assert.Equal("some work", actual); + } + } + + // It is sufficient to test one arg only. All individual args are tested by the PromptsFor___ methods. + // The PromptsFor___ methods suffice to cover the interesting differences between each of the args. + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsWhenEmpty() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "--url", "" }); + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.Url, // argName + "What is the URL of your repository?", // description + false, // secret + string.Empty, // defaultValue + Validators.ServerUrlValidator, // validator + false)) // unattended + .Returns("some url"); + + // Act. + string actual = command.GetUrl(); + + // Assert. + Assert.Equal("some url", actual); + } + } + + // It is sufficient to test one arg only. All individual args are tested by the PromptsFor___ methods. + // The PromptsFor___ methods suffice to cover the interesting differences between each of the args. + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void PromptsWhenInvalid() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. 
+ var command = new CommandSettings(hc, args: new string[] { "--url", "notValid" }); + _promptManager + .Setup(x => x.ReadValue( + Constants.Runner.CommandLine.Args.Url, // argName + "What is the URL of your repository?", // description + false, // secret + string.Empty, // defaultValue + Validators.ServerUrlValidator, // validator + false)) // unattended + .Returns("some url"); + + // Act. + string actual = command.GetUrl(); + + // Assert. + Assert.Equal("some url", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void ValidateCommands() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "badcommand" }); + + // Assert. + Assert.True(command.Validate().Contains("badcommand")); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void ValidateFlags() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "--badflag" }); + + // Assert. + Assert.True(command.Validate().Contains("badflag")); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void ValidateArgs() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, args: new string[] { "--badargname", "bad arg value" }); + + // Assert. + Assert.True(command.Validate().Contains("badargname")); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", nameof(CommandSettings))] + public void ValidateGoodCommandline() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var command = new CommandSettings(hc, + args: new string[] { + "configure", + "--unattended", + "--agent", + "test agent" }); + + // Assert. 
+ Assert.True(command.Validate().Count == 0); + } + } + + private TestHostContext CreateTestContext([CallerMemberName] string testName = "") + { + TestHostContext hc = new TestHostContext(this, testName); + hc.SetSingleton(_promptManager.Object); + return hc; + } + } +} diff --git a/src/Test/L0/Listener/Configuration/AgentCapabilitiesProviderTestL0.cs b/src/Test/L0/Listener/Configuration/AgentCapabilitiesProviderTestL0.cs new file mode 100644 index 00000000000..28d6175473a --- /dev/null +++ b/src/Test/L0/Listener/Configuration/AgentCapabilitiesProviderTestL0.cs @@ -0,0 +1,79 @@ +using GitHub.Runner.Common.Capabilities; +using GitHub.Runner.Listener.Configuration; +using Moq; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Listener +{ + public sealed class AgentCapabilitiesProviderTestL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async void TestGetCapabilities() + { + using (var hc = new TestHostContext(this)) + using (var tokenSource = new CancellationTokenSource()) + { + Mock configurationManager = new Mock(); + hc.SetSingleton(configurationManager.Object); + + // Arrange + var provider = new RunnerCapabilitiesProvider(); + provider.Initialize(hc); + var settings = new RunnerSettings() { AgentName = "IAmAgent007" }; + + // Act + List capabilities = await provider.GetCapabilitiesAsync(settings, tokenSource.Token); + + // Assert + Assert.NotNull(capabilities); + Capability runnerNameCapability = capabilities.SingleOrDefault(x => string.Equals(x.Name, "Runner.Name", StringComparison.Ordinal)); + Assert.NotNull(runnerNameCapability); + Assert.Equal("IAmAgent007", runnerNameCapability.Value); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async void TestInteractiveSessionCapability() + { + using (var hc = new TestHostContext(this)) + using (var tokenSource = new CancellationTokenSource()) + { + hc.StartupType = StartupType.AutoStartup; + await VerifyInteractiveSessionCapability(hc, tokenSource.Token, true); + + hc.StartupType = StartupType.Service; + await VerifyInteractiveSessionCapability(hc, tokenSource.Token, false); + + hc.StartupType = StartupType.Manual; + await VerifyInteractiveSessionCapability(hc, tokenSource.Token, true); + } + } + + private async Task VerifyInteractiveSessionCapability(IHostContext hc, CancellationToken token, bool expectedValue) + { + // Arrange + var provider = new RunnerCapabilitiesProvider(); + provider.Initialize(hc); + var settings = new RunnerSettings() { AgentName = "IAmAgent007" }; + + // Act + List capabilities = await provider.GetCapabilitiesAsync(settings, token); + + // Assert + Assert.NotNull(capabilities); + Capability iSessionCapability = capabilities.SingleOrDefault(x => string.Equals(x.Name, "InteractiveSession", StringComparison.Ordinal)); + Assert.NotNull(iSessionCapability); + bool.TryParse(iSessionCapability.Value, out bool isInteractive); + Assert.Equal(expectedValue, isInteractive); + } + } +} diff --git a/src/Test/L0/Listener/Configuration/AgentCredentialL0.cs b/src/Test/L0/Listener/Configuration/AgentCredentialL0.cs new file mode 100644 index 00000000000..9d743d132df --- /dev/null +++ b/src/Test/L0/Listener/Configuration/AgentCredentialL0.cs @@ -0,0 +1,26 @@ +using GitHub.Runner.Listener; +using GitHub.Runner.Listener.Configuration; +using GitHub.Services.Client; +using GitHub.Services.Common; + +namespace 
GitHub.Runner.Common.Tests.Listener.Configuration +{ + public class TestAgentCredential : CredentialProvider + { + public TestAgentCredential(): base("TEST") {} + public override VssCredentials GetVssCredentials(IHostContext context) + { + Tracing trace = context.GetTrace("PersonalAccessToken"); + trace.Info("GetVssCredentials()"); + + VssBasicCredential loginCred = new VssBasicCredential("test", "password"); + VssCredentials creds = new VssCredentials(loginCred); + trace.Verbose("cred created"); + + return creds; + } + public override void EnsureCredential(IHostContext context, CommandSettings command, string serverUrl) + { + } + } +} diff --git a/src/Test/L0/Listener/Configuration/ArgumentValidatorTestsL0.cs b/src/Test/L0/Listener/Configuration/ArgumentValidatorTestsL0.cs new file mode 100644 index 00000000000..c43ca9853a5 --- /dev/null +++ b/src/Test/L0/Listener/Configuration/ArgumentValidatorTestsL0.cs @@ -0,0 +1,60 @@ +using GitHub.Runner.Listener.Configuration; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Listener.Configuration +{ + public sealed class ArgumentValidatorTestsL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "ArgumentValidator")] + public void ServerUrlValidator() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Assert.True(Validators.ServerUrlValidator("http://servername")); + Assert.False(Validators.ServerUrlValidator("Fail")); + Assert.False(Validators.ServerUrlValidator("ftp://servername")); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "ArgumentValidator")] + public void AuthSchemeValidator() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Assert.True(Validators.AuthSchemeValidator("pat")); + Assert.False(Validators.AuthSchemeValidator("Fail")); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "ArgumentValidator")] + public void NonEmptyValidator() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Assert.True(Validators.NonEmptyValidator("test")); + Assert.False(Validators.NonEmptyValidator(string.Empty)); + } + } + + +#if OS_WINDOWS + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "ArgumentValidator")] +#endif + public void WindowsLogonAccountValidator() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Assert.False(Validators.NTAccountValidator(string.Empty)); + Assert.True(Validators.NTAccountValidator("NT AUTHORITY\\LOCAL SERVICE")); + } + } + } +} diff --git a/src/Test/L0/Listener/Configuration/ConfigurationManagerL0.cs b/src/Test/L0/Listener/Configuration/ConfigurationManagerL0.cs new file mode 100644 index 00000000000..0fd942764ed --- /dev/null +++ b/src/Test/L0/Listener/Configuration/ConfigurationManagerL0.cs @@ -0,0 +1,220 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Listener; +using GitHub.Runner.Common.Capabilities; +using GitHub.Runner.Listener.Configuration; +using GitHub.Runner.Common.Util; +using GitHub.Services.WebApi; +using Moq; +using Newtonsoft.Json; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Security.Cryptography; +using System.Threading.Tasks; +using Xunit; +using GitHub.Services.Location; +using GitHub.Services.Common; + +namespace GitHub.Runner.Common.Tests.Listener.Configuration +{ + public class ConfigurationManagerL0 + { + private Mock _agentServer; + private Mock _locationServer; + private Mock _credMgr; + private Mock _promptManager; + private Mock _store; + private Mock _extnMgr; + // 
private Mock _machineGroupServer; + private Mock _runnerWebProxy; + private Mock _cert; + +#if OS_WINDOWS + private Mock _serviceControlManager; +#endif + +#if !OS_WINDOWS + private Mock _serviceControlManager; +#endif + + private Mock _rsaKeyManager; + private ICapabilitiesManager _capabilitiesManager; + // private DeploymentGroupAgentConfigProvider _deploymentGroupAgentConfigProvider; + private string _expectedToken = "expectedToken"; + private string _expectedServerUrl = "https://localhost"; + private string _expectedAgentName = "expectedAgentName"; + private string _expectedPoolName = "poolName"; + private string _expectedCollectionName = "testCollectionName"; + private string _expectedProjectName = "testProjectName"; + private string _expectedProjectId = "edf3f94e-d251-49df-bfce-602d6c967409"; + private string _expectedMachineGroupName = "testMachineGroupName"; + private string _expectedAuthType = "pat"; + private string _expectedWorkFolder = "_work"; + private int _expectedPoolId = 1; + private int _expectedDeploymentMachineId = 81; + private RSACryptoServiceProvider rsa = null; + private RunnerSettings _configMgrAgentSettings = new RunnerSettings(); + + public ConfigurationManagerL0() + { + _agentServer = new Mock(); + _locationServer = new Mock(); + _credMgr = new Mock(); + _promptManager = new Mock(); + _store = new Mock(); + _extnMgr = new Mock(); + _rsaKeyManager = new Mock(); + // _machineGroupServer = new Mock(); + _runnerWebProxy = new Mock(); + _cert = new Mock(); + +#if OS_WINDOWS + _serviceControlManager = new Mock(); +#endif + +#if !OS_WINDOWS + _serviceControlManager = new Mock(); +#endif + + _capabilitiesManager = new CapabilitiesManager(); + + var expectedAgent = new TaskAgent(_expectedAgentName) { Id = 1 }; + var expectedDeploymentMachine = new DeploymentMachine() { Agent = expectedAgent, Id = _expectedDeploymentMachineId }; + expectedAgent.Authorization = new TaskAgentAuthorization + { + ClientId = Guid.NewGuid(), + AuthorizationUrl = new Uri("http://localhost:8080/pipelines"), + }; + + var connectionData = new ConnectionData() + { + InstanceId = Guid.NewGuid(), + DeploymentType = DeploymentFlags.Hosted, + DeploymentId = Guid.NewGuid() + }; + _agentServer.Setup(x => x.ConnectAsync(It.IsAny(), It.IsAny())).Returns(Task.FromResult(null)); + _locationServer.Setup(x => x.ConnectAsync(It.IsAny())).Returns(Task.FromResult(null)); + _locationServer.Setup(x => x.GetConnectionDataAsync()).Returns(Task.FromResult(connectionData)); + // _machineGroupServer.Setup(x => x.ConnectAsync(It.IsAny())).Returns(Task.FromResult(null)); + // _machineGroupServer.Setup(x => x.UpdateDeploymentTargetsAsync(It.IsAny(), It.IsAny(), It.IsAny>())); + // _machineGroupServer.Setup(x => x.AddDeploymentTargetAsync(It.IsAny(), It.IsAny(), It.IsAny())).Returns(Task.FromResult(expectedDeploymentMachine)); + // _machineGroupServer.Setup(x => x.ReplaceDeploymentTargetAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).Returns(Task.FromResult(expectedDeploymentMachine)); + // _machineGroupServer.Setup(x => x.GetDeploymentTargetsAsync(It.IsAny(), It.IsAny(), It.IsAny())).Returns(Task.FromResult(new List() { })); + // _machineGroupServer.Setup(x => x.DeleteDeploymentTargetAsync(It.IsAny(), It.IsAny(), It.IsAny())).Returns(Task.FromResult(null)); + + _store.Setup(x => x.IsConfigured()).Returns(false); + _store.Setup(x => x.HasCredentials()).Returns(false); + _store.Setup(x => x.GetSettings()).Returns(() => _configMgrAgentSettings); + + _store.Setup(x => x.SaveSettings(It.IsAny())) + 
.Callback((RunnerSettings settings) => + { + _configMgrAgentSettings = settings; + }); + + _credMgr.Setup(x => x.GetCredentialProvider(It.IsAny())).Returns(new TestAgentCredential()); + +#if !OS_WINDOWS + _serviceControlManager.Setup(x => x.GenerateScripts(It.IsAny())); +#endif + + var expectedPools = new List() { new TaskAgentPool(_expectedPoolName) { Id = _expectedPoolId } }; + _agentServer.Setup(x => x.GetAgentPoolsAsync(It.IsAny(), It.IsAny())).Returns(Task.FromResult(expectedPools)); + + var expectedAgents = new List(); + _agentServer.Setup(x => x.GetAgentsAsync(It.IsAny(), It.IsAny())).Returns(Task.FromResult(expectedAgents)); + + _agentServer.Setup(x => x.AddAgentAsync(It.IsAny(), It.IsAny())).Returns(Task.FromResult(expectedAgent)); + _agentServer.Setup(x => x.UpdateAgentAsync(It.IsAny(), It.IsAny())).Returns(Task.FromResult(expectedAgent)); + + rsa = new RSACryptoServiceProvider(2048); + + _rsaKeyManager.Setup(x => x.CreateKey()).Returns(rsa); + } + + private TestHostContext CreateTestContext([CallerMemberName] String testName = "") + { + TestHostContext tc = new TestHostContext(this, testName); + tc.SetSingleton(_credMgr.Object); + tc.SetSingleton(_promptManager.Object); + tc.SetSingleton(_store.Object); + tc.SetSingleton(_extnMgr.Object); + tc.SetSingleton(_agentServer.Object); + tc.SetSingleton(_locationServer.Object); + // tc.SetSingleton(_machineGroupServer.Object); + tc.SetSingleton(_capabilitiesManager); + tc.SetSingleton(_runnerWebProxy.Object); + tc.SetSingleton(_cert.Object); + +#if OS_WINDOWS + tc.SetSingleton(_serviceControlManager.Object); +#else + tc.SetSingleton(_serviceControlManager.Object); +#endif + + tc.SetSingleton(_rsaKeyManager.Object); + + return tc; + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "ConfigurationManagement")] + public async Task CanEnsureConfigure() + { + using (TestHostContext tc = CreateTestContext()) + { + Tracing trace = tc.GetTrace(); + + trace.Info("Creating config manager"); + IConfigurationManager configManager = new ConfigurationManager(); + configManager.Initialize(tc); + + trace.Info("Preparing command line arguments"); + var command = new CommandSettings( + tc, + new[] + { + "configure", +#if !OS_WINDOWS + "--acceptteeeula", +#endif + "--url", _expectedServerUrl, + "--agent", _expectedAgentName, + "--pool", _expectedPoolName, + "--work", _expectedWorkFolder, + "--auth", _expectedAuthType, + "--token", _expectedToken + }); + trace.Info("Constructed."); + _store.Setup(x => x.IsConfigured()).Returns(false); + _configMgrAgentSettings = null; + + trace.Info("Ensuring all the required parameters are available in the command line parameter"); + await configManager.ConfigureAsync(command); + + _store.Setup(x => x.IsConfigured()).Returns(true); + + trace.Info("Configured, verifying all the parameter value"); + var s = configManager.LoadSettings(); + Assert.NotNull(s); + Assert.True(s.ServerUrl.Equals(_expectedServerUrl)); + Assert.True(s.AgentName.Equals(_expectedAgentName)); + Assert.True(s.PoolId.Equals(_expectedPoolId)); + Assert.True(s.WorkFolder.Equals(_expectedWorkFolder)); + + // validate GetAgentPoolsAsync gets called once with automation pool type + _agentServer.Verify(x => x.GetAgentPoolsAsync(It.IsAny(), It.Is(p => p == TaskAgentPoolType.Automation)), Times.Once); + + // validate GetAgentPoolsAsync not called with deployment pool type + _agentServer.Verify(x => x.GetAgentPoolsAsync(It.IsAny(), It.Is(p => p == TaskAgentPoolType.Deployment)), Times.Never); + + // For build and release agent / deployment pool, tags 
logic should not get triggered; +// _machineGroupServer.Verify(x => +// x.UpdateDeploymentTargetsAsync(It.IsAny(), It.IsAny(), It.IsAny>()), Times.Never); + } + } + } +} diff --git a/src/Test/L0/Listener/Configuration/NativeWindowsServiceHelperL0.cs b/src/Test/L0/Listener/Configuration/NativeWindowsServiceHelperL0.cs new file mode 100644 index 00000000000..61fdcd1ce16 --- /dev/null +++ b/src/Test/L0/Listener/Configuration/NativeWindowsServiceHelperL0.cs @@ -0,0 +1,55 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using GitHub.Runner.Listener.Configuration; +using Moq; +using Xunit; +using System.Security.Principal; +using GitHub.Runner.Common; +using GitHub.Runner.Common.Tests; + +namespace Test.L0.Listener.Configuration +{ + public class NativeWindowsServiceHelperL0 + { + +#if OS_WINDOWS + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "ConfigurationManagement")] + public void EnsureGetDefaultServiceAccountShouldReturnNetworkServiceAccount() + { + using (TestHostContext tc = new TestHostContext(this, "EnsureGetDefaultServiceAccountShouldReturnNetworkServiceAccount")) + { + Tracing trace = tc.GetTrace(); + + trace.Info("Creating an instance of the NativeWindowsServiceHelper class"); + var windowsServiceHelper = new NativeWindowsServiceHelper(); + + trace.Info("Trying to get the Default Service Account when a BuildRelease Agent is being configured"); + var defaultServiceAccount = windowsServiceHelper.GetDefaultServiceAccount(); + Assert.True(defaultServiceAccount.ToString().Equals(@"NT AUTHORITY\NETWORK SERVICE"), "If agent is getting configured as build-release agent, default service account should be 'NT AUTHORITY\\NETWORK SERVICE'"); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "ConfigurationManagement")] + public void EnsureGetDefaultAdminServiceAccountShouldReturnLocalSystemAccount() + { + using (TestHostContext tc = new TestHostContext(this, "EnsureGetDefaultAdminServiceAccountShouldReturnLocalSystemAccount")) + { + Tracing trace = tc.GetTrace(); + + trace.Info("Creating an instance of the NativeWindowsServiceHelper class"); + var windowsServiceHelper = new NativeWindowsServiceHelper(); + + trace.Info("Trying to get the Default Service Account when a DeploymentAgent is being configured"); + var defaultServiceAccount = windowsServiceHelper.GetDefaultAdminServiceAccount(); + Assert.True(defaultServiceAccount.ToString().Equals(@"NT AUTHORITY\SYSTEM"), "If agent is getting configured as deployment agent, default service account should be 'NT AUTHORITY\\SYSTEM'"); + } + } +#endif + } +} diff --git a/src/Test/L0/Listener/Configuration/PromptManagerTestsL0.cs b/src/Test/L0/Listener/Configuration/PromptManagerTestsL0.cs new file mode 100644 index 00000000000..cfc8e03a3a2 --- /dev/null +++ b/src/Test/L0/Listener/Configuration/PromptManagerTestsL0.cs @@ -0,0 +1,228 @@ +using GitHub.Runner.Listener.Configuration; +using GitHub.Runner.Common.Util; +using Moq; +using System; +using System.Collections.Generic; +using Xunit; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common.Tests.Listener.Configuration +{ + public class PromptManagerTestsL0 + { + private readonly string _argName = "SomeArgName"; + private readonly string _description = "Some description"; + private readonly PromptManager _promptManager = new PromptManager(); + private readonly Mock _terminal = new Mock(); + private readonly string _unattendedExceptionMessage = "Invalid configuration provided for SomeArgName.
Terminating unattended configuration."; + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "PromptManager")] + public void FallsBackToDefault() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + _terminal + .Setup(x => x.ReadLine()) + .Returns(string.Empty); + _terminal + .Setup(x => x.ReadSecret()) + .Throws(); + hc.SetSingleton(_terminal.Object); + _promptManager.Initialize(hc); + + // Act. + string actual = ReadValue(defaultValue: "Some default value"); + + // Assert. + Assert.Equal("Some default value", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "PromptManager")] + public void FallsBackToDefaultWhenTrimmed() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + _terminal + .Setup(x => x.ReadLine()) + .Returns(" "); + _terminal + .Setup(x => x.ReadSecret()) + .Throws(); + hc.SetSingleton(_terminal.Object); + _promptManager.Initialize(hc); + + // Act. + string actual = ReadValue(defaultValue: "Some default value"); + + // Assert. + Assert.Equal("Some default value", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "PromptManager")] + public void FallsBackToDefaultWhenUnattended() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + _terminal + .Setup(x => x.ReadLine()) + .Throws(); + _terminal + .Setup(x => x.ReadSecret()) + .Throws(); + hc.SetSingleton(_terminal.Object); + _promptManager.Initialize(hc); + + // Act. + string actual = ReadValue( + defaultValue: "Some default value", + unattended: true); + + // Assert. + Assert.Equal("Some default value", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "PromptManager")] + public void Prompts() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + _terminal + .Setup(x => x.ReadLine()) + .Returns("Some prompt value"); + _terminal + .Setup(x => x.ReadSecret()) + .Throws(); + hc.SetSingleton(_terminal.Object); + _promptManager.Initialize(hc); + + // Act. + string actual = ReadValue(); + + // Assert. + Assert.Equal("Some prompt value", actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "PromptManager")] + public void PromptsAgainWhenEmpty() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + var readLineValues = new Queue(new[] { string.Empty, "Some prompt value" }); + _terminal + .Setup(x => x.ReadLine()) + .Returns(() => readLineValues.Dequeue()); + _terminal + .Setup(x => x.ReadSecret()) + .Throws(); + hc.SetSingleton(_terminal.Object); + _promptManager.Initialize(hc); + + // Act. + string actual = ReadValue(); + + // Assert. + Assert.Equal("Some prompt value", actual); + _terminal.Verify(x => x.ReadLine(), Times.Exactly(2)); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "PromptManager")] + public void PromptsAgainWhenFailsValidation() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + var readLineValues = new Queue(new[] { "Some invalid prompt value", "Some valid prompt value" }); + _terminal + .Setup(x => x.ReadLine()) + .Returns(() => readLineValues.Dequeue()); + _terminal + .Setup(x => x.ReadSecret()) + .Throws(); + hc.SetSingleton(_terminal.Object); + _promptManager.Initialize(hc); + + // Act. + string actual = ReadValue(validator: x => x == "Some valid prompt value"); + + // Assert. 
+ Assert.Equal("Some valid prompt value", actual); + _terminal.Verify(x => x.ReadLine(), Times.Exactly(2)); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "PromptManager")] + public void ThrowsWhenUnattended() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + _terminal + .Setup(x => x.ReadLine()) + .Throws(); + _terminal + .Setup(x => x.ReadSecret()) + .Throws(); + hc.SetSingleton(_terminal.Object); + _promptManager.Initialize(hc); + + try + { + // Act. + string actual = ReadValue(unattended: true); + + // Assert. + throw new InvalidOperationException(); + } + catch (Exception ex) + { + // Assert. + Assert.Equal(_unattendedExceptionMessage, ex.Message); + } + } + } + + private string ReadValue( + bool secret = false, + string defaultValue = null, + Func validator = null, + bool unattended = false) + { + return _promptManager.ReadValue( + argName: _argName, + description: _description, + secret: secret, + defaultValue: defaultValue, + validator: validator ?? DefaultValidator, + unattended: unattended); + } + + private static bool DefaultValidator(string val) + { + return true; + } + } +} diff --git a/src/Test/L0/Listener/JobDispatcherL0.cs b/src/Test/L0/Listener/JobDispatcherL0.cs new file mode 100644 index 00000000000..abe27c39bbd --- /dev/null +++ b/src/Test/L0/Listener/JobDispatcherL0.cs @@ -0,0 +1,502 @@ +using System; +using System.Collections.Generic; +using System.Reflection; +using System.Threading; +using System.Threading.Tasks; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Listener; +using GitHub.Services.WebApi; +using Moq; +using Xunit; + +using Pipelines = GitHub.DistributedTask.Pipelines; + +namespace GitHub.Runner.Common.Tests.Listener +{ + public sealed class JobDispatcherL0 + { + private Mock _processChannel; + private Mock _processInvoker; + private Mock _agentServer; + private Mock _configurationStore; + + public JobDispatcherL0() + { + _processChannel = new Mock(); + _processInvoker = new Mock(); + _agentServer = new Mock(); + _configurationStore = new Mock(); + } + + private Pipelines.AgentJobRequestMessage CreateJobRequestMessage() + { + TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference(); + TimelineReference timeline = null; + JobEnvironment environment = new JobEnvironment(); + List tasks = new List(); + Guid JobId = Guid.NewGuid(); + var jobRequest = new AgentJobRequestMessage(plan, timeline, JobId, "someJob", "someJob", environment, tasks); + var result = Pipelines.AgentJobRequestMessageUtil.Convert(jobRequest); + result.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData(); + return result; + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async void DispatchesJobRequest() + { + //Arrange + using (var hc = new TestHostContext(this)) + { + var jobDispatcher = new JobDispatcher(); + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_agentServer.Object); + + hc.EnqueueInstance(_processChannel.Object); + hc.EnqueueInstance(_processInvoker.Object); + + _configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 }); + jobDispatcher.Initialize(hc); + + var ts = new CancellationTokenSource(); + Pipelines.AgentJobRequestMessage message = CreateJobRequestMessage(); + string strMessage = JsonUtility.ToString(message); + + _processInvoker.Setup(x => x.ExecuteAsync(It.IsAny(), It.IsAny(), "spawnclient 1 2", null, It.IsAny())) + .Returns(Task.FromResult(56)); + + _processChannel.Setup(x => 
x.StartServer(It.IsAny())) + .Callback((StartProcessDelegate startDel) => { startDel("1", "2"); }); + _processChannel.Setup(x => x.SendAsync(MessageType.NewJobRequest, It.Is(s => s.Equals(strMessage)), It.IsAny())) + .Returns(Task.CompletedTask); + + var request = new TaskAgentJobRequest(); + PropertyInfo sessionIdProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(sessionIdProperty); + sessionIdProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5)); + + _agentServer.Setup(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).Returns(Task.FromResult(request)); + + _agentServer.Setup(x => x.FinishAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).Returns(Task.FromResult(new TaskAgentJobRequest())); + + + //Act + jobDispatcher.Run(message); + + //Assert + await jobDispatcher.WaitAsync(CancellationToken.None); + + Assert.False(jobDispatcher.RunOnceJobCompleted.Task.IsCompleted, "JobDispatcher should not set task complete token for regular agent."); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async void DispatcherRenewJobRequest() + { + //Arrange + using (var hc = new TestHostContext(this)) + { + int poolId = 1; + Int64 requestId = 1000; + int count = 0; + + var trace = hc.GetTrace(nameof(DispatcherRenewJobRequest)); + TaskCompletionSource firstJobRequestRenewed = new TaskCompletionSource(); + CancellationTokenSource cancellationTokenSource = new CancellationTokenSource(); + + TaskAgentJobRequest request = new TaskAgentJobRequest(); + PropertyInfo lockUntilProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(lockUntilProperty); + lockUntilProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5)); + + hc.SetSingleton(_agentServer.Object); + hc.SetSingleton(_configurationStore.Object); + _configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 }); + _agentServer.Setup(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(() => + { + count++; + if (!firstJobRequestRenewed.Task.IsCompletedSuccessfully) + { + trace.Info("First renew happens."); + } + + if (count < 5) + { + return Task.FromResult(request); + } + else if (count == 5) + { + cancellationTokenSource.Cancel(); + return Task.FromResult(request); + } + else + { + throw new InvalidOperationException("Should not reach here."); + } + }); + + var jobDispatcher = new JobDispatcher(); + jobDispatcher.Initialize(hc); + + await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, firstJobRequestRenewed, cancellationTokenSource.Token); + + Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully); + _agentServer.Verify(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(5)); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async void DispatcherRenewJobRequestStopOnJobNotFoundExceptions() + { + //Arrange + using (var hc = new TestHostContext(this)) + { + int poolId = 1; + Int64 requestId = 1000; + int count = 0; + + var trace = hc.GetTrace(nameof(DispatcherRenewJobRequestStopOnJobNotFoundExceptions)); + TaskCompletionSource firstJobRequestRenewed = new TaskCompletionSource(); + CancellationTokenSource cancellationTokenSource = new CancellationTokenSource(); + + TaskAgentJobRequest request = new
TaskAgentJobRequest(); + PropertyInfo lockUntilProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(lockUntilProperty); + lockUntilProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5)); + + hc.SetSingleton(_agentServer.Object); + hc.SetSingleton(_configurationStore.Object); + _configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 }); + _agentServer.Setup(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(() => + { + count++; + if (!firstJobRequestRenewed.Task.IsCompletedSuccessfully) + { + trace.Info("First renew happens."); + } + + if (count < 5) + { + return Task.FromResult(request); + } + else if (count == 5) + { + cancellationTokenSource.CancelAfter(10000); + throw new TaskAgentJobNotFoundException(""); + } + else + { + throw new InvalidOperationException("Should not reach here."); + } + }); + + var jobDispatcher = new JobDispatcher(); + jobDispatcher.Initialize(hc); + + await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, firstJobRequestRenewed, cancellationTokenSource.Token); + + Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed."); + Assert.False(cancellationTokenSource.IsCancellationRequested); + _agentServer.Verify(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(5)); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async void DispatcherRenewJobRequestStopOnJobTokenExpiredExceptions() + { + //Arrange + using (var hc = new TestHostContext(this)) + { + int poolId = 1; + Int64 requestId = 1000; + int count = 0; + + var trace = hc.GetTrace(nameof(DispatcherRenewJobRequestStopOnJobTokenExpiredExceptions)); + TaskCompletionSource firstJobRequestRenewed = new TaskCompletionSource(); + CancellationTokenSource cancellationTokenSource = new CancellationTokenSource(); + + TaskAgentJobRequest request = new TaskAgentJobRequest(); + PropertyInfo lockUntilProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(lockUntilProperty); + lockUntilProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5)); + + hc.SetSingleton(_agentServer.Object); + hc.SetSingleton(_configurationStore.Object); + _configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 }); + _agentServer.Setup(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(() => + { + count++; + if (!firstJobRequestRenewed.Task.IsCompletedSuccessfully) + { + trace.Info("First renew happens."); + } + + if (count < 5) + { + return Task.FromResult(request); + } + else if (count == 5) + { + cancellationTokenSource.CancelAfter(10000); + throw new TaskAgentJobTokenExpiredException(""); + } + else + { + throw new InvalidOperationException("Should not reach here."); + } + }); + + var jobDispatcher = new JobDispatcher(); + jobDispatcher.Initialize(hc); + + await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, firstJobRequestRenewed, cancellationTokenSource.Token); + + Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed."); + Assert.False(cancellationTokenSource.IsCancellationRequested); + _agentServer.Verify(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(5)); + } + } + + [Fact] + [Trait("Level", 
"L0")] + [Trait("Category", "Agent")] + public async void DispatcherRenewJobRequestRecoverFromExceptions() + { + //Arrange + using (var hc = new TestHostContext(this)) + { + int poolId = 1; + Int64 requestId = 1000; + int count = 0; + + var trace = hc.GetTrace(nameof(DispatcherRenewJobRequestRecoverFromExceptions)); + TaskCompletionSource firstJobRequestRenewed = new TaskCompletionSource(); + CancellationTokenSource cancellationTokenSource = new CancellationTokenSource(); + + TaskAgentJobRequest request = new TaskAgentJobRequest(); + PropertyInfo lockUntilProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(lockUntilProperty); + lockUntilProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5)); + + hc.SetSingleton(_agentServer.Object); + hc.SetSingleton(_configurationStore.Object); + _configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 }); + _agentServer.Setup(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(() => + { + count++; + if (!firstJobRequestRenewed.Task.IsCompletedSuccessfully) + { + trace.Info("First renew happens."); + } + + if (count < 5) + { + return Task.FromResult(request); + } + else if (count == 5 || count == 6 || count == 7) + { + throw new TimeoutException(""); + } + else + { + cancellationTokenSource.Cancel(); + return Task.FromResult(request); + } + }); + + var jobDispatcher = new JobDispatcher(); + jobDispatcher.Initialize(hc); + + await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, firstJobRequestRenewed, cancellationTokenSource.Token); + + Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed."); + Assert.True(cancellationTokenSource.IsCancellationRequested); + _agentServer.Verify(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(8)); + _agentServer.Verify(x => x.RefreshConnectionAsync(RunnerConnectionType.JobRequest, It.IsAny()), Times.Exactly(3)); + _agentServer.Verify(x => x.SetConnectionTimeout(RunnerConnectionType.JobRequest, It.IsAny()), Times.Once); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async void DispatcherRenewJobRequestFirstRenewRetrySixTimes() + { + //Arrange + using (var hc = new TestHostContext(this)) + { + int poolId = 1; + Int64 requestId = 1000; + int count = 0; + + var trace = hc.GetTrace(nameof(DispatcherRenewJobRequestFirstRenewRetrySixTimes)); + TaskCompletionSource firstJobRequestRenewed = new TaskCompletionSource(); + CancellationTokenSource cancellationTokenSource = new CancellationTokenSource(); + + TaskAgentJobRequest request = new TaskAgentJobRequest(); + PropertyInfo lockUntilProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(lockUntilProperty); + lockUntilProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5)); + + hc.SetSingleton(_agentServer.Object); + hc.SetSingleton(_configurationStore.Object); + _configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 }); + _agentServer.Setup(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(() => + { + count++; + if (!firstJobRequestRenewed.Task.IsCompletedSuccessfully) + { + trace.Info("First renew happens."); + } + + if (count <= 5) + { + throw new TimeoutException(""); + } + else + { + 
cancellationTokenSource.CancelAfter(10000); + throw new InvalidOperationException("Should not reach here."); + } + }); + + var jobDispatcher = new JobDispatcher(); + jobDispatcher.Initialize(hc); + + await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, firstJobRequestRenewed, cancellationTokenSource.Token); + + Assert.False(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should failed."); + Assert.False(cancellationTokenSource.IsCancellationRequested); + _agentServer.Verify(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(6)); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async void DispatcherRenewJobRequestStopOnExpiredRequest() + { + //Arrange + using (var hc = new TestHostContext(this)) + { + int poolId = 1; + Int64 requestId = 1000; + int count = 0; + + var trace = hc.GetTrace(nameof(DispatcherRenewJobRequestStopOnExpiredRequest)); + TaskCompletionSource firstJobRequestRenewed = new TaskCompletionSource(); + CancellationTokenSource cancellationTokenSource = new CancellationTokenSource(); + + TaskAgentJobRequest request = new TaskAgentJobRequest(); + PropertyInfo lockUntilProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(lockUntilProperty); + lockUntilProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5)); + + hc.SetSingleton(_agentServer.Object); + hc.SetSingleton(_configurationStore.Object); + _configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 }); + _agentServer.Setup(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(() => + { + count++; + if (!firstJobRequestRenewed.Task.IsCompletedSuccessfully) + { + trace.Info("First renew happens."); + } + + if (count == 1) + { + return Task.FromResult(request); + } + else if (count < 5) + { + throw new TimeoutException(""); + } + else if (count == 5) + { + lockUntilProperty.SetValue(request, DateTime.UtcNow.Subtract(TimeSpan.FromMinutes(11))); + throw new TimeoutException(""); + } + else + { + cancellationTokenSource.CancelAfter(10000); + throw new InvalidOperationException("Should not reach here."); + } + }); + + var jobDispatcher = new JobDispatcher(); + jobDispatcher.Initialize(hc); + + await jobDispatcher.RenewJobRequestAsync(poolId, requestId, Guid.Empty, firstJobRequestRenewed, cancellationTokenSource.Token); + + Assert.True(firstJobRequestRenewed.Task.IsCompletedSuccessfully, "First renew should succeed."); + Assert.False(cancellationTokenSource.IsCancellationRequested); + _agentServer.Verify(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(5)); + _agentServer.Verify(x => x.RefreshConnectionAsync(RunnerConnectionType.JobRequest, It.IsAny()), Times.Exactly(3)); + _agentServer.Verify(x => x.SetConnectionTimeout(RunnerConnectionType.JobRequest, It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async void DispatchesOneTimeJobRequest() + { + //Arrange + using (var hc = new TestHostContext(this)) + { + var jobDispatcher = new JobDispatcher(); + hc.SetSingleton(_configurationStore.Object); + hc.SetSingleton(_agentServer.Object); + + hc.EnqueueInstance(_processChannel.Object); + hc.EnqueueInstance(_processInvoker.Object); + + _configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings() { PoolId = 1 }); + jobDispatcher.Initialize(hc); + + 
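+ //Setup mirrors DispatchesJobRequest; the difference is the 'true' passed to Run(message, true) below,
+ //which should cause RunOnceJobCompleted to complete with 'true' once the one-time job has been dispatched.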
var ts = new CancellationTokenSource(); + Pipelines.AgentJobRequestMessage message = CreateJobRequestMessage(); + string strMessage = JsonUtility.ToString(message); + + _processInvoker.Setup(x => x.ExecuteAsync(It.IsAny(), It.IsAny(), "spawnclient 1 2", null, It.IsAny())) + .Returns(Task.FromResult(56)); + + _processChannel.Setup(x => x.StartServer(It.IsAny())) + .Callback((StartProcessDelegate startDel) => { startDel("1", "2"); }); + _processChannel.Setup(x => x.SendAsync(MessageType.NewJobRequest, It.Is(s => s.Equals(strMessage)), It.IsAny())) + .Returns(Task.CompletedTask); + + var request = new TaskAgentJobRequest(); + PropertyInfo sessionIdProperty = request.GetType().GetProperty("LockedUntil", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(sessionIdProperty); + sessionIdProperty.SetValue(request, DateTime.UtcNow.AddMinutes(5)); + + _agentServer.Setup(x => x.RenewAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).Returns(Task.FromResult(request)); + + _agentServer.Setup(x => x.FinishAgentRequestAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).Returns(Task.FromResult(new TaskAgentJobRequest())); + + //Act + jobDispatcher.Run(message, true); + + //Assert + await jobDispatcher.WaitAsync(CancellationToken.None); + + Assert.True(jobDispatcher.RunOnceJobCompleted.Task.IsCompleted, "JobDispatcher should set task complete token for one time agent."); + Assert.True(jobDispatcher.RunOnceJobCompleted.Task.Result, "JobDispatcher should set task complete token to 'TRUE' for one time agent."); + } + } + } +} diff --git a/src/Test/L0/Listener/MessageListenerL0.cs b/src/Test/L0/Listener/MessageListenerL0.cs new file mode 100644 index 00000000000..5324a3e8da3 --- /dev/null +++ b/src/Test/L0/Listener/MessageListenerL0.cs @@ -0,0 +1,214 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Services.Common; +using GitHub.Services.WebApi; +using GitHub.Runner.Listener; +using GitHub.Runner.Common.Capabilities; +using GitHub.Runner.Listener.Configuration; +using Moq; +using System; +using System.Runtime.CompilerServices; +using System.Threading.Tasks; +using Xunit; +using System.Threading; +using System.Reflection; +using System.Collections.Generic; + +namespace GitHub.Runner.Common.Tests.Listener +{ + public sealed class MessageListenerL0 + { + private RunnerSettings _settings; + private Mock _config; + private Mock _agentServer; + private Mock _credMgr; + private Mock _capabilitiesManager; + + public MessageListenerL0() + { + _settings = new RunnerSettings { AgentId = 1, AgentName = "myagent", PoolId = 123, PoolName = "default", ServerUrl = "http://myserver", WorkFolder = "_work" }; + _config = new Mock(); + _config.Setup(x => x.LoadSettings()).Returns(_settings); + _agentServer = new Mock(); + _credMgr = new Mock(); + _capabilitiesManager = new Mock(); + } + + private TestHostContext CreateTestContext([CallerMemberName] String testName = "") + { + TestHostContext tc = new TestHostContext(this, testName); + tc.SetSingleton(_config.Object); + tc.SetSingleton(_agentServer.Object); + tc.SetSingleton(_credMgr.Object); + tc.SetSingleton(_capabilitiesManager.Object); + return tc; + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async void CreatesSession() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. 
+ var expectedSession = new TaskAgentSession(); + _agentServer + .Setup(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + _capabilitiesManager.Setup(x => x.GetCapabilitiesAsync(_settings, It.IsAny())).Returns(Task.FromResult(new Dictionary())); + + _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + + // Act. + MessageListener listener = new MessageListener(); + listener.Initialize(tc); + + bool result = await listener.CreateSessionAsync(tokenSource.Token); + trace.Info("result: {0}", result); + + // Assert. + Assert.True(result); + _agentServer + .Verify(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token), Times.Once()); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async void DeleteSession() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. + var expectedSession = new TaskAgentSession(); + PropertyInfo sessionIdProperty = expectedSession.GetType().GetProperty("SessionId", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(sessionIdProperty); + sessionIdProperty.SetValue(expectedSession, Guid.NewGuid()); + + _agentServer + .Setup(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + _capabilitiesManager.Setup(x => x.GetCapabilitiesAsync(_settings, It.IsAny())).Returns(Task.FromResult(new Dictionary())); + + _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + + // Act. + MessageListener listener = new MessageListener(); + listener.Initialize(tc); + + bool result = await listener.CreateSessionAsync(tokenSource.Token); + Assert.True(result); + + _agentServer + .Setup(x => x.DeleteAgentSessionAsync( + _settings.PoolId, expectedSession.SessionId, It.IsAny())) + .Returns(Task.CompletedTask); + await listener.DeleteSessionAsync(); + + //Assert + _agentServer + .Verify(x => x.DeleteAgentSessionAsync( + _settings.PoolId, expectedSession.SessionId, It.IsAny()), Times.Once()); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public async void GetNextMessage() + { + using (TestHostContext tc = CreateTestContext()) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = tc.GetTrace(); + + // Arrange. + var expectedSession = new TaskAgentSession(); + PropertyInfo sessionIdProperty = expectedSession.GetType().GetProperty("SessionId", BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); + Assert.NotNull(sessionIdProperty); + sessionIdProperty.SetValue(expectedSession, Guid.NewGuid()); + + _agentServer + .Setup(x => x.CreateAgentSessionAsync( + _settings.PoolId, + It.Is(y => y != null), + tokenSource.Token)) + .Returns(Task.FromResult(expectedSession)); + + _capabilitiesManager.Setup(x => x.GetCapabilitiesAsync(_settings, It.IsAny())).Returns(Task.FromResult(new Dictionary())); + + _credMgr.Setup(x => x.LoadCredentials()).Returns(new VssCredentials()); + + // Act. 
+ MessageListener listener = new MessageListener(); + listener.Initialize(tc); + + bool result = await listener.CreateSessionAsync(tokenSource.Token); + Assert.True(result); + + var arMessages = new TaskAgentMessage[] + { + new TaskAgentMessage + { + Body = "somebody1", + MessageId = 4234, + MessageType = JobRequestMessageTypes.AgentJobRequest + }, + new TaskAgentMessage + { + Body = "somebody2", + MessageId = 4235, + MessageType = JobCancelMessage.MessageType + }, + null, //should be skipped by GetNextMessageAsync implementation + null, + new TaskAgentMessage + { + Body = "somebody3", + MessageId = 4236, + MessageType = JobRequestMessageTypes.AgentJobRequest + } + }; + var messages = new Queue(arMessages); + + _agentServer + .Setup(x => x.GetAgentMessageAsync( + _settings.PoolId, expectedSession.SessionId, It.IsAny(), tokenSource.Token)) + .Returns(async (Int32 poolId, Guid sessionId, Int64? lastMessageId, CancellationToken cancellationToken) => + { + await Task.Yield(); + return messages.Dequeue(); + }); + TaskAgentMessage message1 = await listener.GetNextMessageAsync(tokenSource.Token); + TaskAgentMessage message2 = await listener.GetNextMessageAsync(tokenSource.Token); + TaskAgentMessage message3 = await listener.GetNextMessageAsync(tokenSource.Token); + Assert.Equal(arMessages[0], message1); + Assert.Equal(arMessages[1], message2); + Assert.Equal(arMessages[4], message3); + + //Assert + _agentServer + .Verify(x => x.GetAgentMessageAsync( + _settings.PoolId, expectedSession.SessionId, It.IsAny(), tokenSource.Token), Times.Exactly(arMessages.Length)); + } + } + } +} diff --git a/src/Test/L0/PagingLoggerL0.cs b/src/Test/L0/PagingLoggerL0.cs new file mode 100644 index 00000000000..5adb89bab64 --- /dev/null +++ b/src/Test/L0/PagingLoggerL0.cs @@ -0,0 +1,131 @@ +using GitHub.Runner.Common.Util; +using Moq; +using System; +using System.IO; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Listener +{ + public sealed class PagingLoggerL0 + { + private const string LogData = "messagemessagemessagemessagemessagemessagemessagemessageXPLATmessagemessagemessagemessagemessagemessagemessagemessage"; + private const int PagesToWrite = 2; + private Mock _jobServerQueue; + + public PagingLoggerL0() + { + _jobServerQueue = new Mock(); + PagingLogger.PagingFolder = "pages_" + Guid.NewGuid().ToString(); + } + + private void CleanLogFolder() + { + using (TestHostContext hc = new TestHostContext(this)) + { + //clean test data if any old test forgot + string pagesFolder = Path.Combine(hc.GetDirectory(WellKnownDirectory.Diag), PagingLogger.PagingFolder); + if (Directory.Exists(pagesFolder)) + { + Directory.Delete(pagesFolder, true); + } + } + } + + //WriteAndShipLog test will write "PagesToWrite" pages of data, + //verify file content on the disk and check if API to ship data is invoked + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void WriteAndShipLog() + { + CleanLogFolder(); + + try + { + //Arrange + using (var hc = new TestHostContext(this)) + { + var pagingLogger = new PagingLogger(); + hc.SetSingleton(_jobServerQueue.Object); + pagingLogger.Initialize(hc); + Guid timeLineId = Guid.NewGuid(); + Guid timeLineRecordId = Guid.NewGuid(); + int totalBytes = PagesToWrite * PagingLogger.PageSize; + int bytesWritten = 0; + int logDataSize = System.Text.Encoding.UTF8.GetByteCount(LogData); + _jobServerQueue.Setup(x => x.QueueFileUpload(timeLineId, timeLineRecordId, It.IsAny(), It.IsAny(), It.IsAny(), true)) + .Callback((Guid timelineId, Guid timelineRecordId, string type, string 
name, string path, bool deleteSource) => + { + bool fileExists = File.Exists(path); + Assert.True(fileExists); + + using (var freader = new StreamReader(new FileStream(path, FileMode.Open, FileAccess.Read), System.Text.Encoding.UTF8)) + { + string line; + while ((line = freader.ReadLine()) != null) + { + Assert.True(line.EndsWith(LogData)); + bytesWritten += logDataSize; + } + } + File.Delete(path); + }); + + //Act + int bytesSent = 0; + pagingLogger.Setup(timeLineId, timeLineRecordId); + while (bytesSent < totalBytes) + { + pagingLogger.Write(LogData); + bytesSent += logDataSize; + } + pagingLogger.End(); + + //Assert + _jobServerQueue.Verify(x => x.QueueFileUpload(timeLineId, timeLineRecordId, It.IsAny(), It.IsAny(), It.IsAny(), true), Times.AtLeast(PagesToWrite)); + Assert.Equal(bytesSent, bytesWritten); + } + } + finally + { + //cleanup + CleanLogFolder(); + } + } + + //Try to ship empty log + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void ShipEmptyLog() + { + CleanLogFolder(); + + try + { + //Arrange + using (var hc = new TestHostContext(this)) + { + var pagingLogger = new PagingLogger(); + hc.SetSingleton(_jobServerQueue.Object); + pagingLogger.Initialize(hc); + Guid timeLineId = Guid.NewGuid(); + Guid timeLineRecordId = Guid.NewGuid(); + _jobServerQueue.Setup(x => x.QueueFileUpload(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), true)); + + //Act + pagingLogger.Setup(timeLineId, timeLineRecordId); + pagingLogger.End(); + + //Assert + _jobServerQueue.Verify(x => x.QueueFileUpload(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), true), Times.Exactly(0)); + } + } + finally + { + //cleanup + CleanLogFolder(); + } + } + } +} diff --git a/src/Test/L0/ProcessExtensionL0.cs b/src/Test/L0/ProcessExtensionL0.cs new file mode 100644 index 00000000000..02be3991b4e --- /dev/null +++ b/src/Test/L0/ProcessExtensionL0.cs @@ -0,0 +1,70 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using System.Linq; +using Xunit; + +namespace GitHub.Runner.Common.Tests +{ + public sealed class ProcessExtensionL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task SuccessReadProcessEnv() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + string envName = Guid.NewGuid().ToString(); + string envValue = Guid.NewGuid().ToString(); + + Process sleep = null; + try + { +#if OS_WINDOWS + string node = Path.Combine(TestUtil.GetSrcPath(), @"..\_layout\externals\node12\bin\node"); +#else + string node = Path.Combine(TestUtil.GetSrcPath(), @"../_layout/externals/node12/bin/node"); + hc.EnqueueInstance(new ProcessInvokerWrapper()); +#endif + var startInfo = new ProcessStartInfo(node, "-e \"setTimeout(function(){{}}, 15 * 1000);\""); + startInfo.Environment[envName] = envValue; + sleep = Process.Start(startInfo); + + var timeout = Process.GetProcessById(sleep.Id); + while (timeout == null) + { + await Task.Delay(500); + timeout = Process.GetProcessById(sleep.Id); + } + + try + { + trace.Info($"Read env from {timeout.Id}"); + var value = timeout.GetEnvironmentVariable(hc, envName); + if (string.Equals(value, envValue, StringComparison.OrdinalIgnoreCase)) + { + trace.Info($"Find the env."); + return; + } + } + catch (Exception ex) + { + trace.Error(ex); + } + + Assert.True(false, "Fail to retrive process environment variable."); + } + finally + { + sleep?.Kill(); + } + } + } + } +} 
diff --git a/src/Test/L0/ProcessInvokerL0.cs b/src/Test/L0/ProcessInvokerL0.cs new file mode 100644 index 00000000000..f986d6e5039 --- /dev/null +++ b/src/Test/L0/ProcessInvokerL0.cs @@ -0,0 +1,350 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using Xunit; +using GitHub.Runner.Common.Util; +using System.Threading.Channels; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common.Tests +{ + public sealed class ProcessInvokerL0 + { +// #if OS_WINDOWS +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Common")] +// public async Task DefaultsToCurrentSystemOemEncoding() +// { +// // This test verifies that the additional code pages encoding provider is registered. +// // By default, only Unicode encodings, ASCII, and code page 28591 are supported. An +// // additional provider must be registered to support the full set of encodings that +// // were included in Full .NET prior to 4.6. +// // +// // For example, on an en-US box, this is required for loading the encoding for the +// // default console output code page '437'. Without loading the correct encoding for +// // code page IBM437, some characters cannot be translated correctly, e.g. write 'ç' +// // from powershell.exe. +// using (TestHostContext hc = new TestHostContext(this)) +// { +// Tracing trace = hc.GetTrace(); +// var processInvoker = new ProcessInvokerWrapper(); +// processInvoker.Initialize(hc); +// var stdout = new List(); +// var stderr = new List(); +// processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs e) => +// { +// stdout.Add(e.Data); +// }; +// processInvoker.ErrorDataReceived += (object sender, ProcessDataReceivedEventArgs e) => +// { +// stderr.Add(e.Data); +// }; +// await processInvoker.ExecuteAsync( +// workingDirectory: "", +// fileName: "powershell.exe", +// arguments: $@"-NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command ""Write-Host 'From STDOUT ''ç''' ; Write-Error 'From STDERR ''ç'''""", +// environment: null, +// requireExitCodeZero: false, +// cancellationToken: CancellationToken.None); +// Assert.Equal(1, stdout.Count); +// Assert.Equal("From STDOUT 'ç'", stdout[0]); +// Assert.True(stderr.Count > 0); +// Assert.True(stderr[0].Contains("From STDERR 'ç'")); +// } +// } +// #endif + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task SuccessExitsWithCodeZero() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + Int32 exitCode = -1; + var processInvoker = new ProcessInvokerWrapper(); + processInvoker.Initialize(hc); +#if OS_WINDOWS + exitCode = await processInvoker.ExecuteAsync("", "cmd.exe", "/c \"dir >nul\"", null, CancellationToken.None); +#else + exitCode = await processInvoker.ExecuteAsync("", "bash", "-c echo .", null, CancellationToken.None); +#endif + + trace.Info("Exit Code: {0}", exitCode); + Assert.Equal(0, exitCode); + } + } + +#if !OS_WINDOWS + //Run a process that normally takes 20sec to finish and cancel it. 
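+ //The child is cancelled roughly 500ms after it starts; the assertions require the task to end in the
+ //Canceled (not Faulted) state and the elapsed time to stay well below the 20 second sleep: half of it,
+ //or three quarters of it on slower ARM hardware, otherwise cancellation is considered to have failed.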
+ [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task TestCancel() + { + const int SecondsToRun = 20; + using (TestHostContext hc = new TestHostContext(this)) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = hc.GetTrace(); + var processInvoker = new ProcessInvokerWrapper(); + processInvoker.Initialize(hc); + Stopwatch watch = Stopwatch.StartNew(); + Task execTask; +#if OS_WINDOWS + execTask = processInvoker.ExecuteAsync("", "cmd.exe", $"/c \"choice /T {SecondsToRun} /D y\"", null, tokenSource.Token); +#else + execTask = processInvoker.ExecuteAsync("", "bash", $"-c \"sleep {SecondsToRun}s\"", null, tokenSource.Token); +#endif + await Task.Delay(500); + tokenSource.Cancel(); + try + { + await execTask; + } + catch (OperationCanceledException) + { + trace.Info("Get expected OperationCanceledException."); + } + + Assert.True(execTask.IsCompleted); + Assert.True(!execTask.IsFaulted); + Assert.True(execTask.IsCanceled); + watch.Stop(); + long elapsedSeconds = watch.ElapsedMilliseconds / 1000; + +#if ARM + // if cancellation fails, then execution time is more than 15 seconds + // longer time to compensate for a slower ARM environment (e.g. Raspberry Pi) + long expectedSeconds = (SecondsToRun * 3) / 4; +#else + // if cancellation fails, then execution time is more than 10 seconds + long expectedSeconds = SecondsToRun / 2; +#endif + + Assert.True(elapsedSeconds <= expectedSeconds, $"cancellation failed, because task took too long to run. {elapsedSeconds}"); + } + } +#endif + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task RedirectSTDINCloseStream() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + CancellationTokenSource cancellationTokenSource = new CancellationTokenSource(); + Int32 exitCode = -1; + Channel redirectSTDIN = Channel.CreateUnbounded(new UnboundedChannelOptions() { SingleReader = true, SingleWriter = true }); + List stdout = new List(); + redirectSTDIN.Writer.TryWrite("Single line of STDIN"); + + var processInvoker = new ProcessInvokerWrapper(); + processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs e) => + { + stdout.Add(e.Data); + }; + + processInvoker.Initialize(hc); +#if OS_WINDOWS + var proc = processInvoker.ExecuteAsync("", "cmd.exe", "/c more", null, false, null, false, redirectSTDIN, false, false, cancellationTokenSource.Token); +#else + var proc = processInvoker.ExecuteAsync("", "bash", "-c \"read input; echo $input; read input; echo $input; read input; echo $input;\"", null, false, null, false, redirectSTDIN, false, false, cancellationTokenSource.Token); +#endif + redirectSTDIN.Writer.TryWrite("More line of STDIN"); + redirectSTDIN.Writer.TryWrite("More line of STDIN"); + await Task.Delay(100); + redirectSTDIN.Writer.TryWrite("More line of STDIN"); + redirectSTDIN.Writer.TryWrite("More line of STDIN"); + await Task.Delay(100); + redirectSTDIN.Writer.TryWrite("More line of STDIN"); + cancellationTokenSource.CancelAfter(100); + + try + { + exitCode = await proc; + trace.Info("Exit Code: {0}", exitCode); + } + catch (Exception ex) + { + trace.Error(ex); + } + + trace.Info("STDOUT: {0}", string.Join(Environment.NewLine, stdout)); + Assert.False(stdout.Contains("More line of STDIN"), "STDIN should be closed after first input line."); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task RedirectSTDINKeepStreamOpen() + { + using (TestHostContext hc = new 
TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + CancellationTokenSource cancellationTokenSource = new CancellationTokenSource(); + Int32 exitCode = -1; + Channel redirectSTDIN = Channel.CreateUnbounded(new UnboundedChannelOptions() { SingleReader = true, SingleWriter = true }); + List stdout = new List(); + redirectSTDIN.Writer.TryWrite("Single line of STDIN"); + + var processInvoker = new ProcessInvokerWrapper(); + processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs e) => + { + stdout.Add(e.Data); + }; + + processInvoker.Initialize(hc); +#if OS_WINDOWS + var proc = processInvoker.ExecuteAsync("", "cmd.exe", "/c more", null, false, null, false, redirectSTDIN, false, true, cancellationTokenSource.Token); +#else + var proc = processInvoker.ExecuteAsync("", "bash", "-c \"read input; echo $input; read input; echo $input; read input; echo $input;\"", null, false, null, false, redirectSTDIN, false, true, cancellationTokenSource.Token); +#endif + redirectSTDIN.Writer.TryWrite("More line of STDIN"); + redirectSTDIN.Writer.TryWrite("More line of STDIN"); + await Task.Delay(100); + redirectSTDIN.Writer.TryWrite("More line of STDIN"); + redirectSTDIN.Writer.TryWrite("More line of STDIN"); + await Task.Delay(100); + redirectSTDIN.Writer.TryWrite("More line of STDIN"); + cancellationTokenSource.CancelAfter(100); + + try + { + exitCode = await proc; + trace.Info("Exit Code: {0}", exitCode); + } + catch (Exception ex) + { + trace.Error(ex); + } + + trace.Info("STDOUT: {0}", string.Join(Environment.NewLine, stdout)); + Assert.True(stdout.Contains("More line of STDIN"), "STDIN should keep open and accept more inputs after first input line."); + } + } + +#if OS_LINUX + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task OomScoreAdjIsWriten_Default() + { + // We are on a system that supports oom_score_adj in procfs as assumed by ProcessInvoker + string testProcPath = $"/proc/{Process.GetCurrentProcess().Id}/oom_score_adj"; + if (File.Exists(testProcPath)) + { + using (TestHostContext hc = new TestHostContext(this)) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = hc.GetTrace(); + var processInvoker = new ProcessInvokerWrapper(); + processInvoker.Initialize(hc); + int oomScoreAdj = -9999; + processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs e) => + { + oomScoreAdj = int.Parse(e.Data); + tokenSource.Cancel(); + }; + try + { + var proc = await processInvoker.ExecuteAsync("", "bash", "-c \"cat /proc/$$/oom_score_adj\"", null, false, null, false, null, false, false, + highPriorityProcess: false, + cancellationToken: tokenSource.Token); + Assert.Equal(oomScoreAdj, 500); + } + catch (OperationCanceledException) + { + trace.Info("Caught expected OperationCanceledException"); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task OomScoreAdjIsWriten_FromEnv() + { + // We are on a system that supports oom_score_adj in procfs as assumed by ProcessInvoker + string testProcPath = $"/proc/{Process.GetCurrentProcess().Id}/oom_score_adj"; + if (File.Exists(testProcPath)) + { + using (TestHostContext hc = new TestHostContext(this)) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = hc.GetTrace(); + var processInvoker = new ProcessInvokerWrapper(); + processInvoker.Initialize(hc); + int oomScoreAdj = -9999; + processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs e) => + { + oomScoreAdj 
= int.Parse(e.Data); + tokenSource.Cancel(); + }; + try + { + var proc = await processInvoker.ExecuteAsync("", "bash", "-c \"cat /proc/$$/oom_score_adj\"", + new Dictionary { {"PIPELINE_JOB_OOMSCOREADJ", "1234"} }, + false, null, false, null, false, false, + highPriorityProcess: false, + cancellationToken: tokenSource.Token); + Assert.Equal(oomScoreAdj, 1234); + } + catch (OperationCanceledException) + { + trace.Info("Caught expected OperationCanceledException"); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task OomScoreAdjIsInherited() + { + // We are on a system that supports oom_score_adj in procfs as assumed by ProcessInvoker + string testProcPath = $"/proc/{Process.GetCurrentProcess().Id}/oom_score_adj"; + if (File.Exists(testProcPath)) + { + int testProcOomScoreAdj = 123; + File.WriteAllText(testProcPath, testProcOomScoreAdj.ToString()); + using (TestHostContext hc = new TestHostContext(this)) + using (var tokenSource = new CancellationTokenSource()) + { + Tracing trace = hc.GetTrace(); + var processInvoker = new ProcessInvokerWrapper(); + processInvoker.Initialize(hc); + int oomScoreAdj = -9999; + processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs e) => + { + oomScoreAdj = int.Parse(e.Data); + tokenSource.Cancel(); + }; + try + { + var proc = await processInvoker.ExecuteAsync("", "bash", "-c \"cat /proc/$$/oom_score_adj\"", null, false, null, false, null, false, false, + highPriorityProcess: true, + cancellationToken: tokenSource.Token); + Assert.Equal(oomScoreAdj, 123); + } + catch (OperationCanceledException) + { + trace.Info("Caught expected OperationCanceledException"); + } + } + } + } +#endif + } +} diff --git a/src/Test/L0/ProxyConfigL0.cs b/src/Test/L0/ProxyConfigL0.cs new file mode 100644 index 00000000000..673b5c6e313 --- /dev/null +++ b/src/Test/L0/ProxyConfigL0.cs @@ -0,0 +1,116 @@ +using GitHub.Runner.Common.Util; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text.RegularExpressions; +using Xunit; +using System; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common.Tests +{ + public sealed class ProxyConfigL0 + { + private static readonly Regex NewHttpClientHandlerRegex = new Regex("New\\s+HttpClientHandler\\s*\\(", RegexOptions.Compiled | RegexOptions.IgnoreCase); + private static readonly Regex NewHttpClientRegex = new Regex("New\\s+HttpClient\\s*\\(\\s*\\)", RegexOptions.Compiled | RegexOptions.IgnoreCase); + private static readonly List SkippedFiles = new List() + { + "Runner.Common\\HostContext.cs", + "Runner.Common/HostContext.cs" + }; + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void IsNotUseRawHttpClientHandler() + { + List sourceFiles = Directory.GetFiles( + TestUtil.GetProjectPath("Runner.Common"), + "*.cs", + SearchOption.AllDirectories).ToList(); + sourceFiles.AddRange(Directory.GetFiles( + TestUtil.GetProjectPath("Runner.Listener"), + "*.cs", + SearchOption.AllDirectories)); + sourceFiles.AddRange(Directory.GetFiles( + TestUtil.GetProjectPath("Runner.Worker"), + "*.cs", + SearchOption.AllDirectories)); + + List badCode = new List(); + foreach (string sourceFile in sourceFiles) + { + // Skip skipped files. + if (SkippedFiles.Any(s => sourceFile.Contains(s))) + { + continue; + } + + // Skip files in the obj directory. 
+ if (sourceFile.Contains(StringUtil.Format("{0}obj{0}", Path.DirectorySeparatorChar))) + { + continue; + } + + int lineCount = 0; + foreach (string line in File.ReadAllLines(sourceFile)) + { + lineCount++; + if (NewHttpClientHandlerRegex.IsMatch(line)) + { + badCode.Add($"{sourceFile} (line {lineCount})"); + } + } + } + + Assert.True(badCode.Count == 0, $"The following code is using Raw HttpClientHandler() which will not follow the proxy setting agent have. Please use HostContext.CreateHttpClientHandler() instead.\n {string.Join("\n", badCode)}"); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void IsNotUseRawHttpClient() + { + List sourceFiles = Directory.GetFiles( + TestUtil.GetProjectPath("Runner.Common"), + "*.cs", + SearchOption.AllDirectories).ToList(); + sourceFiles.AddRange(Directory.GetFiles( + TestUtil.GetProjectPath("Runner.Listener"), + "*.cs", + SearchOption.AllDirectories)); + sourceFiles.AddRange(Directory.GetFiles( + TestUtil.GetProjectPath("Runner.Worker"), + "*.cs", + SearchOption.AllDirectories)); + + List badCode = new List(); + foreach (string sourceFile in sourceFiles) + { + // Skip skipped files. + if (SkippedFiles.Any(s => sourceFile.Contains(s))) + { + continue; + } + + // Skip files in the obj directory. + if (sourceFile.Contains(StringUtil.Format("{0}obj{0}", Path.DirectorySeparatorChar))) + { + continue; + } + + int lineCount = 0; + foreach (string line in File.ReadAllLines(sourceFile)) + { + lineCount++; + if (NewHttpClientRegex.IsMatch(line)) + { + badCode.Add($"{sourceFile} (line {lineCount})"); + } + } + } + + Assert.True(badCode.Count == 0, $"The following code is using Raw HttpClient() which will not follow the proxy setting agent have. Please use New HttpClient(HostContext.CreateHttpClientHandler()) instead.\n {string.Join("\n", badCode)}"); + } + } +} diff --git a/src/Test/L0/ServiceInterfacesL0.cs b/src/Test/L0/ServiceInterfacesL0.cs new file mode 100644 index 00000000000..e3e278a0d72 --- /dev/null +++ b/src/Test/L0/ServiceInterfacesL0.cs @@ -0,0 +1,115 @@ +using GitHub.Runner.Listener; +using GitHub.Runner.Common.Capabilities; +using GitHub.Runner.Listener.Configuration; +using GitHub.Runner.Worker; +using GitHub.Runner.Worker.Handlers; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using Xunit; + +namespace GitHub.Runner.Common.Tests +{ + public sealed class ServiceInterfacesL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Agent")] + public void AgentInterfacesSpecifyDefaultImplementation() + { + // Validate all interfaces in the Listener assembly define a valid service locator attribute. + // Otherwise, the interface needs to whitelisted. + var whitelist = new[] + { + typeof(ICredentialProvider) + }; + Validate( + assembly: typeof(IMessageListener).GetTypeInfo().Assembly, + whitelist: whitelist); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void CommonInterfacesSpecifyDefaultImplementation() + { + // Validate all interfaces in the Common assembly define a valid service locator attribute. + // Otherwise, the interface needs to whitelisted. 
+ var whitelist = new[] + { + typeof(IRunnerService), + typeof(ICredentialProvider), + typeof(IExtension), + typeof(IHostContext), + typeof(ITraceManager), + typeof(IThrottlingReporter), + typeof(ICapabilitiesProvider) + }; + Validate( + assembly: typeof(IHostContext).GetTypeInfo().Assembly, + whitelist: whitelist); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void WorkerInterfacesSpecifyDefaultImplementation() + { + // Validate all interfaces in the Worker assembly define a valid service locator attribute. + // Otherwise, the interface needs to whitelisted. + var whitelist = new[] + { + typeof(IActionCommandExtension), + typeof(IExecutionContext), + typeof(IHandler), + typeof(IJobExtension), + typeof(IStep), + typeof(IStepHost), + typeof(IDiagnosticLogManager), + typeof(IEnvironmentContextData) + }; + Validate( + assembly: typeof(IStepsRunner).GetTypeInfo().Assembly, + whitelist: whitelist); + } + + private static void Validate(Assembly assembly, params Type[] whitelist) + { + // Iterate over all non-whitelisted interfaces contained within the assembly. + IDictionary w = whitelist.ToDictionary(x => x.GetTypeInfo()); + foreach (TypeInfo interfaceTypeInfo in assembly.DefinedTypes.Where(x => x.IsInterface && !w.ContainsKey(x))) + { + // Temporary hack due to shared code copied in two places. + if (interfaceTypeInfo.FullName.StartsWith("GitHub.DistributedTask")) + { + continue; + } + + if (interfaceTypeInfo.FullName.Contains("IConverter")){ + continue; + } + + // Assert the ServiceLocatorAttribute is defined on the interface. + CustomAttributeData attribute = + interfaceTypeInfo + .CustomAttributes + .SingleOrDefault(x => x.AttributeType == typeof(ServiceLocatorAttribute)); + Assert.True(attribute != null, $"Missing {nameof(ServiceLocatorAttribute)} for interface '{interfaceTypeInfo.FullName}'. Add the attribute to the interface or whitelist the interface in the test."); + + // Assert the interface is mapped to a concrete type. + CustomAttributeNamedArgument defaultArg = + attribute + .NamedArguments + .SingleOrDefault(x => String.Equals(x.MemberName, ServiceLocatorAttribute.DefaultPropertyName, StringComparison.Ordinal)); + Type concreteType = defaultArg.TypedValue.Value as Type; + string invalidConcreteTypeMessage = $"Invalid Default parameter on {nameof(ServiceLocatorAttribute)} for the interface '{interfaceTypeInfo.FullName}'. 
The default implementation must not be null, must not be an interface, must be a class, and must implement the interface '{interfaceTypeInfo.FullName}'."; + Assert.True(concreteType != null, invalidConcreteTypeMessage); + TypeInfo concreteTypeInfo = concreteType.GetTypeInfo(); + Assert.False(concreteTypeInfo.IsInterface, invalidConcreteTypeMessage); + Assert.True(concreteTypeInfo.IsClass, invalidConcreteTypeMessage); + Assert.True(concreteTypeInfo.ImplementedInterfaces.Any(x => x.GetTypeInfo() == interfaceTypeInfo), invalidConcreteTypeMessage); + } + } + } +} diff --git a/src/Test/L0/TestHostContext.cs b/src/Test/L0/TestHostContext.cs new file mode 100644 index 00000000000..0d821b17721 --- /dev/null +++ b/src/Test/L0/TestHostContext.cs @@ -0,0 +1,373 @@ +using GitHub.Runner.Common.Util; +using System; +using System.Collections.Concurrent; +using System.Globalization; +using System.IO; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using System.Runtime.Loader; +using System.Reflection; +using System.Collections.Generic; +using GitHub.DistributedTask.Logging; +using System.Net.Http.Headers; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common.Tests +{ + public sealed class TestHostContext : IHostContext, IDisposable + { + private readonly ConcurrentDictionary> _serviceInstances = new ConcurrentDictionary>(); + private readonly ConcurrentDictionary _serviceSingletons = new ConcurrentDictionary(); + private readonly ITraceManager _traceManager; + private readonly Terminal _term; + private readonly SecretMasker _secretMasker; + private CancellationTokenSource _agentShutdownTokenSource = new CancellationTokenSource(); + private string _suiteName; + private string _testName; + private Tracing _trace; + private AssemblyLoadContext _loadContext; + private string _tempDirectoryRoot = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("D")); + private StartupType _startupType; + public event EventHandler Unloading; + public CancellationToken RunnerShutdownToken => _agentShutdownTokenSource.Token; + public ShutdownReason RunnerShutdownReason { get; private set; } + public ISecretMasker SecretMasker => _secretMasker; + public TestHostContext(object testClass, [CallerMemberName] string testName = "") + { + ArgUtil.NotNull(testClass, nameof(testClass)); + ArgUtil.NotNullOrEmpty(testName, nameof(testName)); + _loadContext = AssemblyLoadContext.GetLoadContext(typeof(TestHostContext).GetTypeInfo().Assembly); + _loadContext.Unloading += LoadContext_Unloading; + _testName = testName; + + // Trim the test assembly's root namespace from the test class's full name. + _suiteName = testClass.GetType().FullName.Substring( + startIndex: typeof(Tests.TestHostContext).FullName.LastIndexOf(nameof(TestHostContext))); + _suiteName = _suiteName.Replace(".", "_"); + + // Setup the trace manager. 
+ TraceFileName = Path.Combine( + Path.Combine(TestUtil.GetSrcPath(), "Test", "TestLogs"), + $"trace_{_suiteName}_{_testName}.log"); + if (File.Exists(TraceFileName)) + { + File.Delete(TraceFileName); + } + + var traceListener = new HostTraceListener(TraceFileName); + _secretMasker = new SecretMasker(); + _secretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape); + _secretMasker.AddValueEncoder(ValueEncoders.UriDataEscape); + _traceManager = new TraceManager(traceListener, _secretMasker); + _trace = GetTrace(nameof(TestHostContext)); + + // inject a terminal in silent mode so all console output + // goes to the test trace file + _term = new Terminal(); + _term.Silent = true; + SetSingleton(_term); + EnqueueInstance(_term); + +#if !OS_WINDOWS + string eulaFile = Path.Combine(GetDirectory(WellKnownDirectory.Externals), Constants.Path.TeeDirectory, "license.html"); + Directory.CreateDirectory(GetDirectory(WellKnownDirectory.Externals)); + Directory.CreateDirectory(Path.Combine(GetDirectory(WellKnownDirectory.Externals), Constants.Path.TeeDirectory)); + File.WriteAllText(eulaFile, "testeulafile"); +#endif + } + + public CultureInfo DefaultCulture { get; private set; } + + public RunMode RunMode { get; set; } + + public string TraceFileName { get; private set; } + + public StartupType StartupType + { + get + { + return _startupType; + } + set + { + _startupType = value; + } + } + + public ProductInfoHeaderValue UserAgent => new ProductInfoHeaderValue("L0Test", "0.0"); + + public async Task Delay(TimeSpan delay, CancellationToken token) + { + await Task.Delay(TimeSpan.Zero); + } + + public T CreateService() where T : class, IRunnerService + { + _trace.Verbose($"Create service: '{typeof(T).Name}'"); + + // Dequeue a registered instance. + object service; + ConcurrentQueue queue; + if (!_serviceInstances.TryGetValue(typeof(T), out queue) || + !queue.TryDequeue(out service)) + { + throw new Exception($"Unable to dequeue a registered instance for type '{typeof(T).FullName}'."); + } + + var s = service as T; + s.Initialize(this); + return s; + } + + public T GetService() where T : class, IRunnerService + { + _trace.Verbose($"Get service: '{typeof(T).Name}'"); + + // Get the registered singleton instance. + object service; + if (!_serviceSingletons.TryGetValue(typeof(T), out service)) + { + throw new Exception($"Singleton instance not registered for type '{typeof(T).FullName}'."); + } + + T s = service as T; + s.Initialize(this); + return s; + } + + public void EnqueueInstance(T instance) where T : class, IRunnerService + { + // Enqueue a service instance to be returned by CreateService. + if (object.ReferenceEquals(instance, null)) + { + throw new ArgumentNullException(nameof(instance)); + } + + ConcurrentQueue queue = _serviceInstances.GetOrAdd( + key: typeof(T), + valueFactory: x => new ConcurrentQueue()); + queue.Enqueue(instance); + } + + public void SetDefaultCulture(string name) + { + DefaultCulture = new CultureInfo(name); + } + + public void SetSingleton(T singleton) where T : class, IRunnerService + { + // Set the singleton instance to be returned by GetService. 
+ if (object.ReferenceEquals(singleton, null)) + { + throw new ArgumentNullException(nameof(singleton)); + } + + _serviceSingletons[typeof(T)] = singleton; + } + + public string GetDirectory(WellKnownDirectory directory) + { + string path; + switch (directory) + { + case WellKnownDirectory.Bin: + path = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location); + break; + + case WellKnownDirectory.Diag: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + Constants.Path.DiagDirectory); + break; + + case WellKnownDirectory.Externals: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + Constants.Path.ExternalsDirectory); + break; + + case WellKnownDirectory.Root: + path = new DirectoryInfo(GetDirectory(WellKnownDirectory.Bin)).Parent.FullName; + break; + + case WellKnownDirectory.Temp: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Work), + Constants.Path.TempDirectory); + break; + + case WellKnownDirectory.Actions: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Work), + Constants.Path.ActionsDirectory); + break; + + case WellKnownDirectory.Tools: + path = Environment.GetEnvironmentVariable("AGENT_TOOLSDIRECTORY") ?? Environment.GetEnvironmentVariable(Constants.Variables.Agent.ToolsDirectory); + if (string.IsNullOrEmpty(path)) + { + path = Path.Combine( + GetDirectory(WellKnownDirectory.Work), + Constants.Path.ToolDirectory); + } + break; + + case WellKnownDirectory.Update: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Work), + Constants.Path.UpdateDirectory); + break; + + case WellKnownDirectory.Work: + path = Path.Combine( + _tempDirectoryRoot, + WellKnownDirectory.Work.ToString()); + break; + + default: + throw new NotSupportedException($"Unexpected well known directory: '{directory}'"); + } + + _trace.Info($"Well known directory '{directory}': '{path}'"); + return path; + } + + public string GetConfigFile(WellKnownConfigFile configFile) + { + string path; + switch (configFile) + { + case WellKnownConfigFile.Runner: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".agent"); + break; + + case WellKnownConfigFile.Credentials: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".credentials"); + break; + + case WellKnownConfigFile.RSACredentials: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".credentials_rsaparams"); + break; + + case WellKnownConfigFile.Service: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".service"); + break; + + case WellKnownConfigFile.CredentialStore: +#if OS_OSX + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".credential_store.keychain"); +#else + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".credential_store"); +#endif + break; + + case WellKnownConfigFile.Certificates: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".certificates"); + break; + + case WellKnownConfigFile.Proxy: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".proxy"); + break; + + case WellKnownConfigFile.ProxyCredentials: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".proxycredentials"); + break; + + case WellKnownConfigFile.ProxyBypass: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".proxybypass"); + break; + + case WellKnownConfigFile.Options: + path = Path.Combine( + GetDirectory(WellKnownDirectory.Root), + ".options"); + break; + default: + throw new NotSupportedException($"Unexpected well known config file: '{configFile}'"); + } + + 
_trace.Info($"Well known config file '{configFile}': '{path}'"); + return path; + } + + // simple convenience factory so each suite/test gets a different trace file per run + public Tracing GetTrace() + { + Tracing trace = GetTrace($"{_suiteName}_{_testName}"); + trace.Info($"Starting {_testName}"); + return trace; + } + + public Tracing GetTrace(string name) + { + return _traceManager[name]; + } + + public void ShutdownRunner(ShutdownReason reason) + { + ArgUtil.NotNull(reason, nameof(reason)); + RunnerShutdownReason = reason; + _agentShutdownTokenSource.Cancel(); + } + + public void WritePerfCounter(string counter) + { + } + + public void Dispose() + { + Dispose(true); + GC.SuppressFinalize(this); + } + + private void Dispose(bool disposing) + { + if (disposing) + { + if (_loadContext != null) + { + _loadContext.Unloading -= LoadContext_Unloading; + _loadContext = null; + } + _traceManager?.Dispose(); + try + { + Directory.Delete(_tempDirectoryRoot); + } + catch (Exception) + { + // eat exception on dispose + } + } + } + + private void LoadContext_Unloading(AssemblyLoadContext obj) + { + if (Unloading != null) + { + Unloading(this, null); + } + } + } +} diff --git a/src/Test/L0/TestUtil.cs b/src/Test/L0/TestUtil.cs new file mode 100644 index 00000000000..101dedee783 --- /dev/null +++ b/src/Test/L0/TestUtil.cs @@ -0,0 +1,39 @@ +using GitHub.Runner.Common.Util; +using System.IO; +using Xunit; +using System; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common.Tests +{ + public static class TestUtil + { + private const string Src = "src"; + private const string TestData = "TestData"; + + public static string GetProjectPath(string name = "Test") + { + ArgUtil.NotNullOrEmpty(name, nameof(name)); + string projectDir = Path.Combine( + GetSrcPath(), + name); + Assert.True(Directory.Exists(projectDir)); + return projectDir; + } + + public static string GetSrcPath() + { + string srcDir = Environment.GetEnvironmentVariable("GITHUB_RUNNER_SRC_DIR"); + ArgUtil.Directory(srcDir, nameof(srcDir)); + Assert.Equal(Src, Path.GetFileName(srcDir)); + return srcDir; + } + + public static string GetTestDataPath() + { + string testDataDir = Path.Combine(GetProjectPath(), TestData); + Assert.True(Directory.Exists(testDataDir)); + return testDataDir; + } + } +} diff --git a/src/Test/L0/Util/ArgUtilL0.cs b/src/Test/L0/Util/ArgUtilL0.cs new file mode 100644 index 00000000000..7ede3bed7fd --- /dev/null +++ b/src/Test/L0/Util/ArgUtilL0.cs @@ -0,0 +1,148 @@ +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Util +{ + public sealed class ArgUtilL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void Equal_MatchesObjectEquality() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange. + string expected = "Some string".ToLower(); // ToLower is required to avoid reference equality + string actual = "Some string".ToLower(); // due to compile-time string interning. + + // Act/Assert. + ArgUtil.Equal(expected: expected, actual: actual, name: "Some parameter"); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void Equal_MatchesReferenceEquality() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange. + object expected = new object(); + object actual = expected; + + // Act/Assert. 
+ ArgUtil.Equal(expected: expected, actual: actual, name: "Some parameter"); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void Equal_MatchesStructEquality() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange. + int expected = 123; + int actual = expected; + + // Act/Assert. + ArgUtil.Equal(expected: expected, actual: actual, name: "Some parameter"); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void Equal_ThrowsWhenActualObjectIsNull() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange. + object expected = new object(); + object actual = null; + + // Act/Assert. + Assert.Throws(() => + { + ArgUtil.Equal(expected: expected, actual: actual, name: "Some parameter"); + }); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void Equal_ThrowsWhenExpectedObjectIsNull() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange. + object expected = null; + object actual = new object(); + + // Act/Assert. + Assert.Throws(() => + { + ArgUtil.Equal(expected: expected, actual: actual, name: "Some parameter"); + }); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void Equal_ThrowsWhenObjectsAreNotEqual() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange. + object expected = new object(); + object actual = new object(); + + // Act/Assert. + Assert.Throws(() => + { + ArgUtil.Equal(expected: expected, actual: actual, name: "Some parameter"); + }); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void Equal_ThrowsWhenStructsAreNotEqual() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange. + int expected = 123; + int actual = 456; + + // Act/Assert. + Assert.Throws(() => + { + ArgUtil.Equal(expected: expected, actual: actual, name: "Some parameter"); + }); + } + } + } +} diff --git a/src/Test/L0/Util/IOUtilL0.cs b/src/Test/L0/Util/IOUtilL0.cs new file mode 100644 index 00000000000..6a990c7898e --- /dev/null +++ b/src/Test/L0/Util/IOUtilL0.cs @@ -0,0 +1,957 @@ +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Util +{ + public sealed class IOUtilL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void Delete_DeletesDirectory() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a directory with a file. + string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + string file = Path.Combine(directory, "some file"); + try + { + Directory.CreateDirectory(directory); + File.WriteAllText(path: file, contents: "some contents"); + + // Act. + IOUtil.Delete(directory, CancellationToken.None); + + // Assert. + Assert.False(Directory.Exists(directory)); + } + finally + { + // Cleanup. 
+ if (Directory.Exists(directory)) + { + Directory.Delete(directory, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void Delete_DeletesFile() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a directory with a file. + string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + string file = Path.Combine(directory, "some file"); + try + { + Directory.CreateDirectory(directory); + File.WriteAllText(path: file, contents: "some contents"); + + // Act. + IOUtil.Delete(file, CancellationToken.None); + + // Assert. + Assert.False(File.Exists(file)); + } + finally + { + // Cleanup. + if (Directory.Exists(directory)) + { + Directory.Delete(directory, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void DeleteDirectory_DeletesDirectoriesRecursively() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a directory with a grandchild directory. + string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + try + { + Directory.CreateDirectory(Path.Combine(directory, "some child directory", "some grandchild directory")); + + // Act. + IOUtil.DeleteDirectory(directory, CancellationToken.None); + + // Assert. + Assert.False(Directory.Exists(directory)); + } + finally + { + // Cleanup. + if (Directory.Exists(directory)) + { + Directory.Delete(directory, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task DeleteDirectory_DeletesDirectoryReparsePointChain() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create the following structure: + // randomDir + // randomDir/ -> + // randomDir/ -> + // randomDir/ -> + // randomDir/ -> + // randomDir/ -> targetDir + // randomDir/targetDir + // randomDir/targetDir/file.txt + // + // The purpose of this test is to verify that DirectoryNotFoundException is gracefully handled when + // deleting a chain of reparse point directories. Since the reparse points are named in a random order, + // the DirectoryNotFoundException case is likely to be encountered. 
+ string randomDir = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + try + { + string targetDir = Directory.CreateDirectory(Path.Combine(randomDir, "targetDir")).FullName; + string file = Path.Combine(targetDir, "file.txt"); + File.WriteAllText(path: file, contents: "some contents"); + string linkDir1 = Path.Combine(randomDir, $"{Guid.NewGuid()}_linkDir1"); + string linkDir2 = Path.Combine(randomDir, $"{Guid.NewGuid()}_linkDir2"); + string linkDir3 = Path.Combine(randomDir, $"{Guid.NewGuid()}_linkDir3"); + string linkDir4 = Path.Combine(randomDir, $"{Guid.NewGuid()}_linkDir4"); + string linkDir5 = Path.Combine(randomDir, $"{Guid.NewGuid()}_linkDir5"); + await CreateDirectoryReparsePoint(context: hc, link: linkDir1, target: linkDir2); + await CreateDirectoryReparsePoint(context: hc, link: linkDir2, target: linkDir3); + await CreateDirectoryReparsePoint(context: hc, link: linkDir3, target: linkDir4); + await CreateDirectoryReparsePoint(context: hc, link: linkDir4, target: linkDir5); + await CreateDirectoryReparsePoint(context: hc, link: linkDir5, target: targetDir); + + // Sanity check to verify the link was created properly: + Assert.True(Directory.Exists(linkDir1)); + Assert.True(new DirectoryInfo(linkDir1).Attributes.HasFlag(FileAttributes.ReparsePoint)); + Assert.True(File.Exists(Path.Combine(linkDir1, "file.txt"))); + + // Act. + IOUtil.DeleteDirectory(randomDir, CancellationToken.None); + + // Assert. + Assert.False(Directory.Exists(linkDir1)); + Assert.False(Directory.Exists(targetDir)); + Assert.False(File.Exists(file)); + Assert.False(Directory.Exists(randomDir)); + } + finally + { + // Cleanup. + if (Directory.Exists(randomDir)) + { + Directory.Delete(randomDir, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task DeleteDirectory_DeletesDirectoryReparsePointsBeforeDirectories() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create the following structure: + // randomDir + // randomDir/linkDir -> targetDir + // randomDir/targetDir + // randomDir/targetDir/file.txt + // + // The accuracy of this test relies on an assumption that IOUtil sorts the directories in + // descending order before deleting them - either by length or by default sort order. + string randomDir = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + try + { + string targetDir = Directory.CreateDirectory(Path.Combine(randomDir, "targetDir")).FullName; + string file = Path.Combine(targetDir, "file.txt"); + File.WriteAllText(path: file, contents: "some contents"); + string linkDir = Path.Combine(randomDir, "linkDir"); + await CreateDirectoryReparsePoint(context: hc, link: linkDir, target: targetDir); + + // Sanity check to verify the link was created properly: + Assert.True(Directory.Exists(linkDir)); + Assert.True(new DirectoryInfo(linkDir).Attributes.HasFlag(FileAttributes.ReparsePoint)); + Assert.True(File.Exists(Path.Combine(linkDir, "file.txt"))); + + // Act. + IOUtil.DeleteDirectory(randomDir, CancellationToken.None); + + // Assert. + Assert.False(Directory.Exists(linkDir)); + Assert.False(Directory.Exists(targetDir)); + Assert.False(File.Exists(file)); + Assert.False(Directory.Exists(randomDir)); + } + finally + { + // Cleanup. 
+ if (Directory.Exists(randomDir)) + { + Directory.Delete(randomDir, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void DeleteDirectory_DeletesFilesRecursively() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a directory with a grandchild file. + string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + try + { + string file = Path.Combine(directory, "some subdirectory", "some file"); + Directory.CreateDirectory(Path.GetDirectoryName(file)); + File.WriteAllText(path: file, contents: "some contents"); + + // Act. + IOUtil.DeleteDirectory(directory, CancellationToken.None); + + // Assert. + Assert.False(Directory.Exists(directory)); + } + finally + { + // Cleanup. + if (Directory.Exists(directory)) + { + Directory.Delete(directory, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void DeleteDirectory_DeletesReadOnlyDirectories() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a directory with a read-only subdirectory. + string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + string subdirectory = Path.Combine(directory, "some subdirectory"); + try + { + var subdirectoryInfo = new DirectoryInfo(subdirectory); + subdirectoryInfo.Create(); + subdirectoryInfo.Attributes = subdirectoryInfo.Attributes | FileAttributes.ReadOnly; + + // Act. + IOUtil.DeleteDirectory(directory, CancellationToken.None); + + // Assert. + Assert.False(Directory.Exists(directory)); + } + finally + { + // Cleanup. + var subdirectoryInfo = new DirectoryInfo(subdirectory); + if (subdirectoryInfo.Exists) + { + subdirectoryInfo.Attributes = subdirectoryInfo.Attributes & ~FileAttributes.ReadOnly; + } + + if (Directory.Exists(directory)) + { + Directory.Delete(directory, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void DeleteDirectory_DeletesReadOnlyRootDirectory() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a read-only directory. + string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + try + { + var directoryInfo = new DirectoryInfo(directory); + directoryInfo.Create(); + directoryInfo.Attributes = directoryInfo.Attributes | FileAttributes.ReadOnly; + + // Act. + IOUtil.DeleteDirectory(directory, CancellationToken.None); + + // Assert. + Assert.False(Directory.Exists(directory)); + } + finally + { + // Cleanup. + var directoryInfo = new DirectoryInfo(directory); + if (directoryInfo.Exists) + { + directoryInfo.Attributes = directoryInfo.Attributes & ~FileAttributes.ReadOnly; + directoryInfo.Delete(); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void DeleteDirectory_DeletesReadOnlyFiles() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a directory with a read-only file. 
+ string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + string file = Path.Combine(directory, "some file"); + try + { + Directory.CreateDirectory(directory); + File.WriteAllText(path: file, contents: "some contents"); + File.SetAttributes(file, File.GetAttributes(file) | FileAttributes.ReadOnly); + + // Act. + IOUtil.DeleteDirectory(directory, CancellationToken.None); + + // Assert. + Assert.False(Directory.Exists(directory)); + } + finally + { + // Cleanup. + if (File.Exists(file)) + { + File.SetAttributes(file, File.GetAttributes(file) & ~FileAttributes.ReadOnly); + } + + if (Directory.Exists(directory)) + { + Directory.Delete(directory, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task DeleteDirectory_DoesNotFollowDirectoryReparsePoint() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create the following structure: + // randomDir + // randomDir/targetDir + // randomDir/targetDir/file.txt + // randomDir/linkDir -> targetDir + string randomDir = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + try + { + string targetDir = Directory.CreateDirectory(Path.Combine(randomDir, "targetDir")).FullName; + string file = Path.Combine(targetDir, "file.txt"); + File.WriteAllText(path: file, contents: "some contents"); + string linkDir = Path.Combine(randomDir, "linkDir"); + await CreateDirectoryReparsePoint(context: hc, link: linkDir, target: targetDir); + + // Sanity check to verify the link was created properly: + Assert.True(Directory.Exists(linkDir)); + Assert.True(new DirectoryInfo(linkDir).Attributes.HasFlag(FileAttributes.ReparsePoint)); + Assert.True(File.Exists(Path.Combine(linkDir, "file.txt"))); + + // Act. + IOUtil.DeleteDirectory(linkDir, CancellationToken.None); + + // Assert. + Assert.False(Directory.Exists(linkDir)); + Assert.True(Directory.Exists(targetDir)); + Assert.True(File.Exists(file)); + } + finally + { + // Cleanup. + if (Directory.Exists(randomDir)) + { + Directory.Delete(randomDir, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task DeleteDirectory_DoesNotFollowNestLevel1DirectoryReparsePoint() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create the following structure: + // randomDir + // randomDir/targetDir + // randomDir/targetDir/file.txt + // randomDir/subDir + // randomDir/subDir/linkDir -> ../targetDir + string randomDir = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + try + { + string targetDir = Directory.CreateDirectory(Path.Combine(randomDir, "targetDir")).FullName; + string file = Path.Combine(targetDir, "file.txt"); + File.WriteAllText(path: file, contents: "some contents"); + string subDir = Directory.CreateDirectory(Path.Combine(randomDir, "subDir")).FullName; + string linkDir = Path.Combine(subDir, "linkDir"); + await CreateDirectoryReparsePoint(context: hc, link: linkDir, target: targetDir); + + // Sanity check to verify the link was created properly: + Assert.True(Directory.Exists(linkDir)); + Assert.True(new DirectoryInfo(linkDir).Attributes.HasFlag(FileAttributes.ReparsePoint)); + Assert.True(File.Exists(Path.Combine(linkDir, "file.txt"))); + + // Act. + IOUtil.DeleteDirectory(subDir, CancellationToken.None); + + // Assert. 
+ Assert.False(Directory.Exists(subDir)); + Assert.True(Directory.Exists(targetDir)); + Assert.True(File.Exists(file)); + } + finally + { + // Cleanup. + if (Directory.Exists(randomDir)) + { + Directory.Delete(randomDir, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public async Task DeleteDirectory_DoesNotFollowNestLevel2DirectoryReparsePoint() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create the following structure: + // randomDir + // randomDir/targetDir + // randomDir/targetDir/file.txt + // randomDir/subDir1 + // randomDir/subDir1/subDir2 + // randomDir/subDir1/subDir2/linkDir -> ../../targetDir + string randomDir = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + try + { + string targetDir = Directory.CreateDirectory(Path.Combine(randomDir, "targetDir")).FullName; + string file = Path.Combine(targetDir, "file.txt"); + File.WriteAllText(path: file, contents: "some contents"); + string subDir1 = Directory.CreateDirectory(Path.Combine(randomDir, "subDir1")).FullName; + string subDir2 = Directory.CreateDirectory(Path.Combine(subDir1, "subDir2")).FullName; + string linkDir = Path.Combine(subDir2, "linkDir"); + await CreateDirectoryReparsePoint(context: hc, link: linkDir, target: targetDir); + + // Sanity check to verify the link was created properly: + Assert.True(Directory.Exists(linkDir)); + Assert.True(new DirectoryInfo(linkDir).Attributes.HasFlag(FileAttributes.ReparsePoint)); + Assert.True(File.Exists(Path.Combine(linkDir, "file.txt"))); + + // Act. + IOUtil.DeleteDirectory(subDir1, CancellationToken.None); + + // Assert. + Assert.False(Directory.Exists(subDir1)); + Assert.True(Directory.Exists(targetDir)); + Assert.True(File.Exists(file)); + } + finally + { + // Cleanup. + if (Directory.Exists(randomDir)) + { + Directory.Delete(randomDir, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void DeleteDirectory_IgnoresFile() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a directory with a file. + string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + string file = Path.Combine(directory, "some file"); + try + { + Directory.CreateDirectory(directory); + File.WriteAllText(path: file, contents: "some contents"); + + // Act: Call "DeleteDirectory" against the file. The method should not blow up and + // should simply ignore the file since it is not a directory. + IOUtil.DeleteDirectory(file, CancellationToken.None); + + // Assert. + Assert.True(File.Exists(file)); + } + finally + { + // Cleanup. + if (Directory.Exists(directory)) + { + Directory.Delete(directory, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void DeleteFile_DeletesFile() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a directory with a file. + string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + string file = Path.Combine(directory, "some file"); + try + { + Directory.CreateDirectory(directory); + File.WriteAllText(path: file, contents: "some contents"); + + // Act. + IOUtil.DeleteFile(file); + + // Assert. + Assert.False(File.Exists(file)); + } + finally + { + // Cleanup. 
+ if (Directory.Exists(directory)) + { + Directory.Delete(directory, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void DeleteFile_DeletesReadOnlyFile() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a directory with a read-only file. + string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + string file = Path.Combine(directory, "some file"); + try + { + Directory.CreateDirectory(directory); + File.WriteAllText(path: file, contents: "some contents"); + File.SetAttributes(file, File.GetAttributes(file) | FileAttributes.ReadOnly); + + // Act. + IOUtil.DeleteFile(file); + + // Assert. + Assert.False(File.Exists(file)); + } + finally + { + // Cleanup. + if (File.Exists(file)) + { + File.SetAttributes(file, File.GetAttributes(file) & ~FileAttributes.ReadOnly); + } + + if (Directory.Exists(directory)) + { + Directory.Delete(directory, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void DeleteFile_IgnoresDirectory() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a directory. + string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + try + { + Directory.CreateDirectory(directory); + + // Act: Call "DeleteFile" against a directory. The method should not blow up and + // should simply ignore the directory since it is not a file. + IOUtil.DeleteFile(directory); + + // Assert. + Assert.True(Directory.Exists(directory)); + } + finally + { + // Cleanup. + if (Directory.Exists(directory)) + { + Directory.Delete(directory, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void GetRelativePath() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + string relativePath; +#if OS_WINDOWS + /// MakeRelative(@"d:\src\project\foo.cpp", @"d:\src") -> @"project\foo.cpp" + // Act. + relativePath = IOUtil.MakeRelative(@"d:\src\project\foo.cpp", @"d:\src"); + // Assert. + Assert.True(string.Equals(relativePath, @"project\foo.cpp", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); + + /// MakeRelative(@"d:\", @"d:\specs") -> @"d:\" + // Act. + relativePath = IOUtil.MakeRelative(@"d:\", @"d:\specs"); + // Assert. + Assert.True(string.Equals(relativePath, @"d:\", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); + + /// MakeRelative(@"d:\src\project\foo.cpp", @"d:\src\proj") -> @"d:\src\project\foo.cpp" + // Act. + relativePath = IOUtil.MakeRelative(@"d:\src\project\foo.cpp", @"d:\src\proj"); + // Assert. + Assert.True(string.Equals(relativePath, @"d:\src\project\foo.cpp", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); + + /// MakeRelative(@"d:\src\project\foo", @"d:\src") -> @"project\foo" + // Act. + relativePath = IOUtil.MakeRelative(@"d:\src\project\foo", @"d:\src"); + // Assert. + Assert.True(string.Equals(relativePath, @"project\foo", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); + + /// MakeRelative(@"d:\src\project\foo.cpp", @"d:\src\project\foo.cpp") -> @"" + // Act. + relativePath = IOUtil.MakeRelative(@"d:\src\project", @"d:\src\project"); + // Assert. 
+ Assert.True(string.Equals(relativePath, string.Empty, StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); + + /// MakeRelative(@"d:/src/project/foo.cpp", @"d:/src") -> @"project/foo.cpp" + // Act. + relativePath = IOUtil.MakeRelative(@"d:/src/project/foo.cpp", @"d:/src"); + // Assert. + Assert.True(string.Equals(relativePath, @"project\foo.cpp", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); + + /// MakeRelative(@"d:/src/project/foo.cpp", @"d:\src") -> @"d:/src/project/foo.cpp" + // Act. + relativePath = IOUtil.MakeRelative(@"d:/src/project/foo.cpp", @"d:/src"); + // Assert. + Assert.True(string.Equals(relativePath, @"project\foo.cpp", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); + + /// MakeRelative(@"d:/src/project/foo", @"d:/src") -> @"project/foo" + // Act. + relativePath = IOUtil.MakeRelative(@"d:/src/project/foo", @"d:/src"); + // Assert. + Assert.True(string.Equals(relativePath, @"project\foo", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); + + /// MakeRelative(@"d\src\project", @"d:/src/project") -> @"" + // Act. + relativePath = IOUtil.MakeRelative(@"d:\src\project", @"d:/src/project"); + // Assert. + Assert.True(string.Equals(relativePath, string.Empty, StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); +#else + /// MakeRelative(@"/user/src/project/foo.cpp", @"/user/src") -> @"project/foo.cpp" + // Act. + relativePath = IOUtil.MakeRelative(@"/user/src/project/foo.cpp", @"/user/src"); + // Assert. + Assert.True(string.Equals(relativePath, @"project/foo.cpp", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); + + /// MakeRelative(@"/user", @"/user/specs") -> @"/user" + // Act. + relativePath = IOUtil.MakeRelative(@"/user", @"/user/specs"); + // Assert. + Assert.True(string.Equals(relativePath, @"/user", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); + + /// MakeRelative(@"/user/src/project/foo.cpp", @"/user/src/proj") -> @"/user/src/project/foo.cpp" + // Act. + relativePath = IOUtil.MakeRelative(@"/user/src/project/foo.cpp", @"/user/src/proj"); + // Assert. + Assert.True(string.Equals(relativePath, @"/user/src/project/foo.cpp", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); + + /// MakeRelative(@"/user/src/project/foo", @"/user/src") -> @"project/foo" + // Act. + relativePath = IOUtil.MakeRelative(@"/user/src/project/foo", @"/user/src"); + // Assert. + Assert.True(string.Equals(relativePath, @"project/foo", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); + + /// MakeRelative(@"/user/src/project", @"/user/src/project") -> @"" + // Act. + relativePath = IOUtil.MakeRelative(@"/user/src/project", @"/user/src/project"); + // Assert. + Assert.True(string.Equals(relativePath, string.Empty, StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {relativePath}"); +#endif + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void ResolvePath() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + string resolvePath; +#if OS_WINDOWS + // Act. + resolvePath = IOUtil.ResolvePath(@"d:\src\project\", @"foo"); + // Assert. 
+ Assert.True(string.Equals(resolvePath, @"d:\src\project\foo", StringComparison.OrdinalIgnoreCase), $"resolvePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"d:\", @"specs"); + // Assert. + Assert.True(string.Equals(resolvePath, @"d:\specs", StringComparison.OrdinalIgnoreCase), $"resolvePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"d:\src\project\", @"src\proj"); + // Assert. + Assert.True(string.Equals(resolvePath, @"d:\src\project\src\proj", StringComparison.OrdinalIgnoreCase), $"resolvePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"d:\src\project\foo", @".."); + // Assert. + Assert.True(string.Equals(resolvePath, @"d:\src\project", StringComparison.OrdinalIgnoreCase), $"resolvePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"d:\src\project", @"..\..\"); + // Assert. + Assert.True(string.Equals(resolvePath, @"d:\", StringComparison.OrdinalIgnoreCase), $"resolvePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"d:/src/project", @"../."); + // Assert. + Assert.True(string.Equals(resolvePath, @"d:\src", StringComparison.OrdinalIgnoreCase), $"resolvePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"d:/src/project/", @"../../foo"); + // Assert. + Assert.True(string.Equals(resolvePath, @"d:\foo", StringComparison.OrdinalIgnoreCase), $"resolvePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"d:/src/project/foo", @".././bar/.././../foo"); + // Assert. + Assert.True(string.Equals(resolvePath, @"d:\src\foo", StringComparison.OrdinalIgnoreCase), $"resolvePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"d:\", @"."); + // Assert. + Assert.True(string.Equals(resolvePath, @"d:\", StringComparison.OrdinalIgnoreCase), $"resolvePath does not expected: {resolvePath}"); +#else + // Act. + resolvePath = IOUtil.ResolvePath(@"/user/src/project", @"foo"); + // Assert. + Assert.True(string.Equals(resolvePath, @"/user/src/project/foo", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"/root", @"./user/./specs"); + // Assert. + Assert.True(string.Equals(resolvePath, @"/root/user/specs", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"/", @"user/specs/."); + // Assert. + Assert.True(string.Equals(resolvePath, @"/user/specs", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"/user/src/project", @"../"); + // Assert. + Assert.True(string.Equals(resolvePath, @"/user/src", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"/user/src/project", @"../../"); + // Assert. + Assert.True(string.Equals(resolvePath, @"/user", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"/user/src/project/foo", @"../../../../user/./src"); + // Assert. + Assert.True(string.Equals(resolvePath, @"/user/src", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"/user/src", @"../../."); + // Assert. 
+ Assert.True(string.Equals(resolvePath, @"/", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {resolvePath}"); + + // Act. + resolvePath = IOUtil.ResolvePath(@"/", @"./"); + // Assert. + Assert.True(string.Equals(resolvePath, @"/", StringComparison.OrdinalIgnoreCase), $"RelativePath does not expected: {resolvePath}"); +#endif + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void ValidateExecutePermission_DoesNotExceedFailsafe() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a directory. + string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName()); + try + { + Directory.CreateDirectory(directory); + + // Act/Assert: Call "ValidateExecutePermission". The method should not blow up. + IOUtil.ValidateExecutePermission(directory); + } + finally + { + // Cleanup. + if (Directory.Exists(directory)) + { + Directory.Delete(directory, recursive: true); + } + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void ValidateExecutePermission_ExceedsFailsafe() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange: Create a deep directory. + string directory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Bin), Path.GetRandomFileName(), "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20"); + try + { + Directory.CreateDirectory(directory); + Environment.SetEnvironmentVariable("AGENT_TEST_VALIDATE_EXECUTE_PERMISSIONS_FAILSAFE", "20"); + + try + { + // Act: Call "ValidateExecutePermission". The method should throw since + // it exceeds the failsafe recursion depth. + IOUtil.ValidateExecutePermission(directory); + + // Assert. + throw new Exception("Should have thrown not supported exception."); + } + catch (NotSupportedException) + { + } + } + finally + { + // Cleanup. + if (Directory.Exists(directory)) + { + Directory.Delete(directory, recursive: true); + } + } + } + } + + private static async Task CreateDirectoryReparsePoint(IHostContext context, string link, string target) + { +#if OS_WINDOWS + string fileName = Environment.GetEnvironmentVariable("ComSpec"); + string arguments = $@"/c ""mklink /J ""{link}"" {target}"""""; +#else + string fileName = "/bin/ln"; + string arguments = $@"-s ""{target}"" ""{link}"""; +#endif + ArgUtil.File(fileName, nameof(fileName)); + using (var processInvoker = new ProcessInvokerWrapper()) + { + processInvoker.Initialize(context); + await processInvoker.ExecuteAsync( + workingDirectory: context.GetDirectory(WellKnownDirectory.Bin), + fileName: fileName, + arguments: arguments, + environment: null, + requireExitCodeZero: true, + cancellationToken: CancellationToken.None); + } + } + } +} diff --git a/src/Test/L0/Util/StringUtilL0.cs b/src/Test/L0/Util/StringUtilL0.cs new file mode 100644 index 00000000000..c67b3922df2 --- /dev/null +++ b/src/Test/L0/Util/StringUtilL0.cs @@ -0,0 +1,190 @@ +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System.Globalization; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Util +{ + public class StringUtilL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void FormatAlwaysCallsFormat() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange. 
+ var variableSets = new[] + { + new { Format = null as string, Args = null as object[], Expected = string.Empty }, + new { Format = null as string, Args = new object[0], Expected = string.Empty }, + new { Format = null as string, Args = new object[] { 123 }, Expected = string.Empty }, + new { Format = "Some message", Args = null as object[], Expected = "Some message" }, + new { Format = "Some message", Args = new object[0], Expected = "Some message" }, + new { Format = "Some message", Args = new object[] { 123 }, Expected = "Some message" }, + new { Format = "Some format '{0}'", Args = null as object[], Expected = "Some format ''" }, + new { Format = "Some format '{0}'", Args = new object[0], Expected = "Some format ''" }, + new { Format = "Some format '{0}'", Args = new object[] { 123 }, Expected = "Some format '123'" }, + }; + foreach (var variableSet in variableSets) + { + trace.Info($"{nameof(variableSet)}:"); + trace.Info(variableSet); + + // Act. + string actual = StringUtil.Format(variableSet.Format, variableSet.Args); + + // Assert. + Assert.Equal(variableSet.Expected, actual); + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void FormatHandlesFormatException() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Arrange. + var variableSets = new[] + { + new { Format = "Bad format { 0}", Args = null as object[], Expected = "Bad format { 0}" }, + new { Format = "Bad format { 0}", Args = new object[0], Expected = "Bad format { 0} " }, + new { Format = "Bad format { 0}", Args = new object[] { null }, Expected = "Bad format { 0} " }, + new { Format = "Bad format { 0}", Args = new object[] { 123, 456 }, Expected = "Bad format { 0} 123, 456" }, + }; + foreach (var variableSet in variableSets) + { + trace.Info($"{nameof(variableSet)}:"); + trace.Info(variableSet); + + // Act. + string actual = StringUtil.Format(variableSet.Format, variableSet.Args); + + // Assert. + Assert.Equal(variableSet.Expected, actual); + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void FormatUsesInvariantCulture() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + CultureInfo originalCulture = CultureInfo.CurrentCulture; + try + { + CultureInfo.CurrentCulture = new CultureInfo("it-IT"); + + // Act. + string actual = StringUtil.Format("{0:N2}", 123456.789); + + // Actual + Assert.Equal("123,456.79", actual); + } + finally + { + CultureInfo.CurrentCulture = originalCulture; + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void ConvertNullOrEmptryStringToBool() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + string nullString = null; + string emptyString = string.Empty; + + // Act. + bool result1 = StringUtil.ConvertToBoolean(nullString); + bool result2 = StringUtil.ConvertToBoolean(emptyString); + + // Actual + Assert.False(result1, "Null String should convert to false."); + Assert.False(result2, "Empty String should convert to false."); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void ConvertNullOrEmptryStringToDefaultBool() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + string nullString = null; + string emptyString = string.Empty; + + // Act. 
+ bool result1 = StringUtil.ConvertToBoolean(nullString, true); + bool result2 = StringUtil.ConvertToBoolean(emptyString, true); + + // Actual + Assert.True(result1, "Null String should convert to true since default value is set to true."); + Assert.True(result2, "Empty String should convert to true since default value is set to true."); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void ConvertStringToBool() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + string trueString1 = "1"; + string trueString2 = "True"; + string trueString3 = "$TRUE"; + string falseString1 = "0"; + string falseString2 = "false"; + string falseString3 = "$False"; + + string undefineString1 = "-1"; + string undefineString2 = "sometext"; + string undefineString3 = "2015-03-21"; + + // Act. + bool result1 = StringUtil.ConvertToBoolean(trueString1, false); + bool result2 = StringUtil.ConvertToBoolean(trueString2); + bool result3 = StringUtil.ConvertToBoolean(trueString3, true); + bool result4 = StringUtil.ConvertToBoolean(falseString1, true); + bool result5 = StringUtil.ConvertToBoolean(falseString2); + bool result6 = StringUtil.ConvertToBoolean(falseString3, false); + + bool result7 = StringUtil.ConvertToBoolean(undefineString1, true); + bool result8 = StringUtil.ConvertToBoolean(undefineString2); + bool result9 = StringUtil.ConvertToBoolean(undefineString3, false); + + // Actual + Assert.True(result1, $"'{trueString1}' should convert to true."); + Assert.True(result2, $"'{trueString2}' should convert to true."); + Assert.True(result3, $"'{trueString3}' should convert to true."); + Assert.False(result4, $"'{falseString1}' should convert to false."); + Assert.False(result5, $"'{falseString2}' should convert to false."); + Assert.False(result6, $"'{falseString3}' should convert to false."); + + Assert.True(result7, $"'{undefineString1}' should convert to true, since default is true."); + Assert.False(result8, $"'{undefineString2}' should convert to false."); + Assert.False(result9, $"'{undefineString3}' should convert to false."); + } + } + } +} diff --git a/src/Test/L0/Util/TaskResultUtilL0.cs b/src/Test/L0/Util/TaskResultUtilL0.cs new file mode 100644 index 00000000000..b12d07c6ab4 --- /dev/null +++ b/src/Test/L0/Util/TaskResultUtilL0.cs @@ -0,0 +1,206 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Util +{ + public class TaskResultUtilL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void TaskResultReturnCodeTranslate() + { + // Arrange. + using (TestHostContext hc = new TestHostContext(this)) + { + // Act. + TaskResult abandon = TaskResultUtil.TranslateFromReturnCode(TaskResultUtil.TranslateToReturnCode(TaskResult.Abandoned)); + // Actual + Assert.Equal(TaskResult.Abandoned, abandon); + + // Act. + TaskResult canceled = TaskResultUtil.TranslateFromReturnCode(TaskResultUtil.TranslateToReturnCode(TaskResult.Canceled)); + // Actual + Assert.Equal(TaskResult.Canceled, canceled); + + // Act. + TaskResult failed = TaskResultUtil.TranslateFromReturnCode(TaskResultUtil.TranslateToReturnCode(TaskResult.Failed)); + // Actual + Assert.Equal(TaskResult.Failed, failed); + + // Act. + TaskResult skipped = TaskResultUtil.TranslateFromReturnCode(TaskResultUtil.TranslateToReturnCode(TaskResult.Skipped)); + // Actual + Assert.Equal(TaskResult.Skipped, skipped); + + // Act. 
+ TaskResult succeeded = TaskResultUtil.TranslateFromReturnCode(TaskResultUtil.TranslateToReturnCode(TaskResult.Succeeded)); + // Actual + Assert.Equal(TaskResult.Succeeded, succeeded); + + // Act. + TaskResult unknowReturnCode1 = TaskResultUtil.TranslateFromReturnCode(0); + // Actual + Assert.Equal(TaskResult.Failed, unknowReturnCode1); + + // Act. + TaskResult unknowReturnCode2 = TaskResultUtil.TranslateFromReturnCode(1); + // Actual + Assert.Equal(TaskResult.Failed, unknowReturnCode2); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void TaskResultsMerge() + { + // Arrange. + using (TestHostContext hc = new TestHostContext(this)) + { + TaskResult merged; + + // + // No current result merge. + // + // Act. + merged = TaskResultUtil.MergeTaskResults(null, TaskResult.Succeeded); + // Actual + Assert.Equal(TaskResult.Succeeded, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(null, TaskResult.Abandoned); + // Actual + Assert.Equal(TaskResult.Abandoned, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(null, TaskResult.Canceled); + // Actual + Assert.Equal(TaskResult.Canceled, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(null, TaskResult.Failed); + // Actual + Assert.Equal(TaskResult.Failed, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(null, TaskResult.Skipped); + // Actual + Assert.Equal(TaskResult.Skipped, merged); + + // + // Same result merge. + // + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Succeeded, TaskResult.Succeeded); + // Actual + Assert.Equal(TaskResult.Succeeded, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Abandoned, TaskResult.Abandoned); + // Actual + Assert.Equal(TaskResult.Abandoned, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Canceled, TaskResult.Canceled); + // Actual + Assert.Equal(TaskResult.Canceled, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Failed, TaskResult.Failed); + // Actual + Assert.Equal(TaskResult.Failed, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Skipped, TaskResult.Skipped); + // Actual + Assert.Equal(TaskResult.Skipped, merged); + + // + // Forward result merge + // + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Succeeded, TaskResult.Abandoned); + // Actual + Assert.Equal(TaskResult.Abandoned, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Succeeded, TaskResult.Canceled); + // Actual + Assert.Equal(TaskResult.Canceled, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Succeeded, TaskResult.Failed); + // Actual + Assert.Equal(TaskResult.Failed, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Succeeded, TaskResult.Skipped); + // Actual + Assert.Equal(TaskResult.Skipped, merged); + + // + // No backward merge + // + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Abandoned, TaskResult.Succeeded); + // Actual + Assert.Equal(TaskResult.Abandoned, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Canceled, TaskResult.Succeeded); + // Actual + Assert.Equal(TaskResult.Canceled, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Failed, TaskResult.Succeeded); + // Actual + Assert.Equal(TaskResult.Failed, merged); + // Act. 
+ merged = TaskResultUtil.MergeTaskResults(TaskResult.Skipped, TaskResult.Succeeded); + // Actual + Assert.Equal(TaskResult.Skipped, merged); + + // + // Worst result no change + // + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Abandoned, TaskResult.Canceled); + // Actual + Assert.Equal(TaskResult.Abandoned, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Abandoned, TaskResult.Failed); + // Actual + Assert.Equal(TaskResult.Abandoned, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Abandoned, TaskResult.Skipped); + // Actual + Assert.Equal(TaskResult.Abandoned, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Canceled, TaskResult.Abandoned); + // Actual + Assert.Equal(TaskResult.Canceled, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Canceled, TaskResult.Failed); + // Actual + Assert.Equal(TaskResult.Canceled, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Canceled, TaskResult.Skipped); + // Actual + Assert.Equal(TaskResult.Canceled, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Failed, TaskResult.Abandoned); + // Actual + Assert.Equal(TaskResult.Abandoned, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Failed, TaskResult.Canceled); + // Actual + Assert.Equal(TaskResult.Canceled, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Failed, TaskResult.Skipped); + // Actual + Assert.Equal(TaskResult.Skipped, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Skipped, TaskResult.Abandoned); + // Actual + Assert.Equal(TaskResult.Skipped, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Skipped, TaskResult.Canceled); + // Actual + Assert.Equal(TaskResult.Skipped, merged); + // Act. + merged = TaskResultUtil.MergeTaskResults(TaskResult.Skipped, TaskResult.Failed); + // Actual + Assert.Equal(TaskResult.Skipped, merged); + } + } + } +} diff --git a/src/Test/L0/Util/UrlUtilL0.cs b/src/Test/L0/Util/UrlUtilL0.cs new file mode 100644 index 00000000000..b84db224256 --- /dev/null +++ b/src/Test/L0/Util/UrlUtilL0.cs @@ -0,0 +1,65 @@ +using System; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Util +{ + public class UrlUtilL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void GetCredentialEmbeddedUrl_NoUsernameAndPassword() + { + // Act. + Uri result = UrlUtil.GetCredentialEmbeddedUrl(new Uri("https://github.com/actions/runner.git"), string.Empty, string.Empty); + // Actual + Assert.Equal("https://github.com/actions/runner.git", result.AbsoluteUri); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void GetCredentialEmbeddedUrl_NoUsername() + { + // Act. + Uri result = UrlUtil.GetCredentialEmbeddedUrl(new Uri("https://github.com/actions/runner.git"), string.Empty, "password123"); + // Actual + Assert.Equal("https://emptyusername:password123@github.com/actions/runner.git", result.AbsoluteUri); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void GetCredentialEmbeddedUrl_NoPassword() + { + // Act. 
+ Uri result = UrlUtil.GetCredentialEmbeddedUrl(new Uri("https://github.com/actions/runner.git"), "user123", string.Empty); + // Actual + Assert.Equal("https://user123@github.com/actions/runner.git", result.AbsoluteUri); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void GetCredentialEmbeddedUrl_HasUsernameAndPassword() + { + // Act. + Uri result = UrlUtil.GetCredentialEmbeddedUrl(new Uri("https://github.com/actions/runner.git"), "user123", "password123"); + // Actual + Assert.Equal("https://user123:password123@github.com/actions/runner.git", result.AbsoluteUri); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void GetCredentialEmbeddedUrl_UsernameAndPasswordEncoding() + { + // Act. + Uri result = UrlUtil.GetCredentialEmbeddedUrl(new Uri("https://github.com/actions/runner.git"), "user 123", "password 123"); + // Actual + Assert.Equal("https://user%20123:password%20123@github.com/actions/runner.git", result.AbsoluteUri); + } + } +} diff --git a/src/Test/L0/Util/VssUtilL0.cs b/src/Test/L0/Util/VssUtilL0.cs new file mode 100644 index 00000000000..876362dbb65 --- /dev/null +++ b/src/Test/L0/Util/VssUtilL0.cs @@ -0,0 +1,56 @@ +using GitHub.Runner.Common.Util; +using GitHub.Services.Common; +using System; +using System.Collections.Generic; +using System.Net.Http.Headers; +using Xunit; +using System.Text.RegularExpressions; +using GitHub.Runner.Sdk; + +namespace GitHub.Runner.Common.Tests.Util +{ + public sealed class VssUtilL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void VerifyOverwriteVssConnectionSetting() + { + using (TestHostContext hc = new TestHostContext(this)) + { + Tracing trace = hc.GetTrace(); + + // Act. + try + { + trace.Info("Set httpretry to 10."); + Environment.SetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTP_RETRY", "10"); + trace.Info("Set httptimeout to 360."); + Environment.SetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTP_TIMEOUT", "360"); + + var connect = VssUtil.CreateConnection(new Uri("https://github.com/actions/runner"), new VssCredentials()); + + // Assert. + Assert.Equal(connect.Settings.MaxRetryRequest.ToString(), "10"); + Assert.Equal(connect.Settings.SendTimeout.TotalSeconds.ToString(), "360"); + + trace.Info("Set httpretry to 100."); + Environment.SetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTP_RETRY", "100"); + trace.Info("Set httptimeout to 3600."); + Environment.SetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTP_TIMEOUT", "3600"); + + connect = VssUtil.CreateConnection(new Uri("https://github.com/actions/runner"), new VssCredentials()); + + // Assert. 
+ Assert.Equal(connect.Settings.MaxRetryRequest.ToString(), "10"); + Assert.Equal(connect.Settings.SendTimeout.TotalSeconds.ToString(), "1200"); + } + finally + { + Environment.SetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTP_RETRY", ""); + Environment.SetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTP_TIMEOUT", ""); + } + } + } + } +} diff --git a/src/Test/L0/Util/WhichUtilL0.cs b/src/Test/L0/Util/WhichUtilL0.cs new file mode 100644 index 00000000000..99e4a92a5ee --- /dev/null +++ b/src/Test/L0/Util/WhichUtilL0.cs @@ -0,0 +1,74 @@ +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using System; +using System.IO; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Util +{ + public sealed class WhichUtilL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void UseWhichFindGit() + { + using (TestHostContext hc = new TestHostContext(this)) + { + //Arrange + Tracing trace = hc.GetTrace(); + + // Act. + string gitPath = WhichUtil.Which("git", trace: trace); + + trace.Info($"Which(\"git\") returns: {gitPath ?? string.Empty}"); + + // Assert. + Assert.True(!string.IsNullOrEmpty(gitPath) && File.Exists(gitPath), $"Unable to find Git through: {nameof(WhichUtil.Which)}"); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void WhichReturnsNullWhenNotFound() + { + using (TestHostContext hc = new TestHostContext(this)) + { + //Arrange + Tracing trace = hc.GetTrace(); + + // Act. + string nosuch = WhichUtil.Which("no-such-file-cf7e351f", trace: trace); + + trace.Info($"result: {nosuch ?? string.Empty}"); + + // Assert. + Assert.True(string.IsNullOrEmpty(nosuch), "Path should not be resolved"); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Common")] + public void WhichThrowsWhenRequireAndNotFound() + { + using (TestHostContext hc = new TestHostContext(this)) + { + //Arrange + Tracing trace = hc.GetTrace(); + + // Act. 
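+ // Grounded in the assertions below: with require=true and no matching tool on PATH, Which is expected to throw FileNotFoundException carrying the missing tool name in FileName.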
+ try + { + WhichUtil.Which("no-such-file-cf7e351f", require: true, trace: trace); + throw new Exception("which should have thrown"); + } + catch (FileNotFoundException ex) + { + Assert.Equal("no-such-file-cf7e351f", ex.FileName); + } + } + } + } +} diff --git a/src/Test/L0/Worker/ActionCommandL0.cs b/src/Test/L0/Worker/ActionCommandL0.cs new file mode 100644 index 00000000000..b65714ac157 --- /dev/null +++ b/src/Test/L0/Worker/ActionCommandL0.cs @@ -0,0 +1,211 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public class LoggingCommandL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "LoggingCommand")] + public void CommandParserTest() + { + //Arrange + using (var hc = new TestHostContext(this)) + { + string message; + ActionCommand test; + ActionCommand verify; + HashSet commands = new HashSet() { "do-something" }; + //##[do-something k1=v1;]msg + message = "##[do-something k1=v1;]msg"; + test = new ActionCommand("do-something") + { + Data = "msg", + }; + test.Properties.Add("k1", "v1"); + Assert.True(ActionCommand.TryParse(message, commands, out verify)); + Assert.True(IsEqualCommand(hc, test, verify)); + + message = ""; + test = null; + verify = null; + //##[do-something] + message = "##[do-something]"; + test = new ActionCommand("do-something"); + Assert.True(ActionCommand.TryParse(message, commands, out verify)); + Assert.True(IsEqualCommand(hc, test, verify)); + + message = ""; + test = null; + verify = null; + //##[do-something k1=%3B=%0D=%0A=%5D;]%3B-%0D-%0A-%5D + message = "##[do-something k1=%3B=%0D=%0A=%5D;]%3B-%0D-%0A-%5D"; + test = new ActionCommand("do-something") + { + Data = ";-\r-\n-]", + }; + test.Properties.Add("k1", ";=\r=\n=]"); + Assert.True(ActionCommand.TryParse(message, commands, out verify)); + Assert.True(IsEqualCommand(hc, test, verify)); + + message = ""; + test = null; + verify = null; + //##[do-something k1=;k2=;] + message = "##[do-something k1=;k2=;]"; + test = new ActionCommand("do-something"); + Assert.True(ActionCommand.TryParse(message, commands, out verify)); + Assert.True(IsEqualCommand(hc, test, verify)); + + message = ""; + test = null; + verify = null; + //>>> ##[do-something k1=;k2=;] + message = ">>> ##[do-something k1=v1;]msg"; + test = new ActionCommand("do-something") + { + Data = "msg", + }; + test.Properties.Add("k1", "v1"); + Assert.True(ActionCommand.TryParse(message, commands, out verify)); + Assert.True(IsEqualCommand(hc, test, verify)); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "LoggingCommand")] + public void CommandParserV2Test() + { + //Arrange + using (var hc = new TestHostContext(this)) + { + string message; + ActionCommand test; + ActionCommand verify; + HashSet commands = new HashSet() { "do-something" }; + //::do-something k1=v1;]msg + message = "::do-something k1=v1,::msg"; + test = new ActionCommand("do-something") + { + Data = "msg", + }; + test.Properties.Add("k1", "v1"); + Assert.True(ActionCommand.TryParseV2(message, commands, out verify)); + Assert.True(IsEqualCommand(hc, test, verify)); + + message = ""; + test = null; + verify = null; + //::do-something:: + message = "::do-something::"; + test = new ActionCommand("do-something"); + Assert.True(ActionCommand.TryParseV2(message, commands, out verify)); + Assert.True(IsEqualCommand(hc, test, verify)); + + message = ""; + test = null; + verify = null; + //::do-something k1=%3B=%0D=%0A=%5D;::%3B-%0D-%0A-%5D + message = "::do-something 
k1=;=%2C=%0D=%0A=]=%3A,::;-%0D-%0A-]-:-,"; + test = new ActionCommand("do-something") + { + Data = ";-\r-\n-]-:-,", + }; + test.Properties.Add("k1", ";=,=\r=\n=]=:"); + Assert.True(ActionCommand.TryParseV2(message, commands, out verify)); + Assert.True(IsEqualCommand(hc, test, verify)); + + message = ""; + test = null; + verify = null; + //::do-something k1=,k2=,:: + message = "::do-something k1=,k2=,::"; + test = new ActionCommand("do-something"); + Assert.True(ActionCommand.TryParseV2(message, commands, out verify)); + Assert.True(IsEqualCommand(hc, test, verify)); + + message = ""; + test = null; + verify = null; + //::do-something k1=v1:: + message = "::do-something k1=v1::"; + test = new ActionCommand("do-something"); + test.Properties.Add("k1", "v1"); + Assert.True(ActionCommand.TryParseV2(message, commands, out verify)); + Assert.True(IsEqualCommand(hc, test, verify)); + + message = ""; + test = null; + verify = null; + // ::do-something k1=v1,:: + message = " ::do-something k1=v1,::msg"; + test = new ActionCommand("do-something") + { + Data = "msg", + }; + test.Properties.Add("k1", "v1"); + Assert.True(ActionCommand.TryParseV2(message, commands, out verify)); + Assert.True(IsEqualCommand(hc, test, verify)); + + message = ""; + verify = null; + // >>> ::do-something k1=v1,:: + message = " >>> ::do-something k1=v1,::msg"; + Assert.False(ActionCommand.TryParseV2(message, commands, out verify)); + } + } + + private bool IsEqualCommand(IHostContext hc, ActionCommand e1, ActionCommand e2) + { + try + { + if (!string.Equals(e1.Command, e2.Command, StringComparison.OrdinalIgnoreCase)) + { + hc.GetTrace("CommandEqual").Info("Command 1={0}, Command 2={1}", e1.Command, e2.Command); + return false; + } + + if (!string.Equals(e1.Data, e2.Data, StringComparison.OrdinalIgnoreCase) && (!string.IsNullOrEmpty(e1.Data) && !string.IsNullOrEmpty(e2.Data))) + { + hc.GetTrace("CommandEqual").Info("Data 1={0}, Data 2={1}", e1.Data, e2.Data); + return false; + } + + if (e1.Properties.Count != e2.Properties.Count) + { + hc.GetTrace("CommandEqual").Info("Logging events contain different numbers of Properties,{0} to {1}", e1.Properties.Count, e2.Properties.Count); + return false; + } + + if (!e1.Properties.SequenceEqual(e2.Properties)) + { + hc.GetTrace("CommandEqual").Info("Logging events contain different Properties"); + hc.GetTrace("CommandEqual").Info("Properties for event 1:"); + foreach (var data in e1.Properties) + { + hc.GetTrace("CommandEqual").Info("Key={0}, Value={1}", data.Key, data.Value); + } + + hc.GetTrace("CommandEqual").Info("Properties for event 2:"); + foreach (var data in e2.Properties) + { + hc.GetTrace("CommandEqual").Info("Key={0}, Value={1}", data.Key, data.Value); + } + + return false; + } + } + catch (Exception ex) + { + hc.GetTrace("CommandEqual").Info("Catch Exception during compare:{0}", ex.ToString()); + return false; + } + + return true; + } + } +} diff --git a/src/Test/L0/Worker/ActionCommandManagerL0.cs b/src/Test/L0/Worker/ActionCommandManagerL0.cs new file mode 100644 index 00000000000..a2b8f33a3c0 --- /dev/null +++ b/src/Test/L0/Worker/ActionCommandManagerL0.cs @@ -0,0 +1,150 @@ +using System; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Worker; +using Moq; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class ActionCommandManagerL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void EnablePluginInternalCommand() + { + 
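+ // Once plugin internal commands are enabled, a ##[internal-set-repo-path] command should be parsed and routed to the directory manager mock set up below.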
using (TestHostContext _hc = new TestHostContext(this)) + { + var extensionManger = new Mock(); + var directoryManager = new Mock(); + + var pluginCommand = new InternalPluginSetRepoPathCommandExtension(); + pluginCommand.Initialize(_hc); + + var envCommand = new SetEnvCommandExtension(); + envCommand.Initialize(_hc); + + extensionManger.Setup(x => x.GetExtensions()) + .Returns(new List() { pluginCommand, envCommand }); + _hc.SetSingleton(extensionManger.Object); + _hc.SetSingleton(directoryManager.Object); + + Mock _ec = new Mock(); + _ec.Setup(x => x.Write(It.IsAny(), It.IsAny())) + .Returns((string tag, string line) => + { + _hc.GetTrace().Info($"{tag} {line}"); + return 1; + }); + _ec.Setup(x => x.AddIssue(It.IsAny(), It.IsAny())) + .Callback((Issue issue, string message) => + { + _hc.GetTrace().Info($"{issue.Type} {issue.Message} {message ?? string.Empty}"); + }); + ActionCommandManager commandManager = new ActionCommandManager(); + commandManager.Initialize(_hc); + + commandManager.EnablePluginInternalCommand(); + + Assert.True(commandManager.TryProcessCommand(_ec.Object, "##[internal-set-repo-path repoFullName=actions/runner;workspaceRepo=true]somepath")); + + directoryManager.Verify(x => x.UpdateRepositoryDirectory(_ec.Object, "actions/runner", "somepath", true), Times.Once); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void DisablePluginInternalCommand() + { + using (TestHostContext _hc = new TestHostContext(this)) + { + var extensionManger = new Mock(); + var directoryManager = new Mock(); + + var pluginCommand = new InternalPluginSetRepoPathCommandExtension(); + pluginCommand.Initialize(_hc); + + var envCommand = new SetEnvCommandExtension(); + envCommand.Initialize(_hc); + + extensionManger.Setup(x => x.GetExtensions()) + .Returns(new List() { pluginCommand, envCommand }); + + _hc.SetSingleton(extensionManger.Object); + _hc.SetSingleton(directoryManager.Object); + + Mock _ec = new Mock(); + _ec.Setup(x => x.Write(It.IsAny(), It.IsAny())) + .Returns((string tag, string line) => + { + _hc.GetTrace().Info($"{tag} {line}"); + return 1; + }); + _ec.Setup(x => x.AddIssue(It.IsAny(), It.IsAny())) + .Callback((Issue issue, string message) => + { + _hc.GetTrace().Info($"{issue.Type} {issue.Message} {message ?? 
string.Empty}"); + }); + ActionCommandManager commandManager = new ActionCommandManager(); + commandManager.Initialize(_hc); + + commandManager.EnablePluginInternalCommand(); + + Assert.True(commandManager.TryProcessCommand(_ec.Object, "##[internal-set-repo-path repoFullName=actions/runner;workspaceRepo=true]somepath")); + + commandManager.DisablePluginInternalCommand(); + + Assert.False(commandManager.TryProcessCommand(_ec.Object, "##[internal-set-repo-path repoFullName=actions/runner;workspaceRepo=true]somepath")); + + directoryManager.Verify(x => x.UpdateRepositoryDirectory(_ec.Object, "actions/runner", "somepath", true), Times.Once); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void StopProcessCommand() + { + using (TestHostContext _hc = new TestHostContext(this)) + { + var extensionManger = new Mock(); + var pluginCommand = new InternalPluginSetRepoPathCommandExtension(); + pluginCommand.Initialize(_hc); + + var envCommand = new SetEnvCommandExtension(); + envCommand.Initialize(_hc); + + extensionManger.Setup(x => x.GetExtensions()) + .Returns(new List() { pluginCommand, envCommand }); + _hc.SetSingleton(extensionManger.Object); + + Mock _ec = new Mock(); + _ec.Setup(x => x.Write(It.IsAny(), It.IsAny())) + .Returns((string tag, string line) => + { + _hc.GetTrace().Info($"{tag} {line}"); + return 1; + }); + + _ec.Setup(x => x.AddIssue(It.IsAny(), It.IsAny())) + .Callback((Issue issue, string message) => + { + _hc.GetTrace().Info($"{issue.Type} {issue.Message} {message ?? string.Empty}"); + }); + + _ec.Setup(x => x.EnvironmentVariables).Returns(new Dictionary()); + + ActionCommandManager commandManager = new ActionCommandManager(); + commandManager.Initialize(_hc); + + Assert.True(commandManager.TryProcessCommand(_ec.Object, "##[stop-commands]stopToken")); + Assert.False(commandManager.TryProcessCommand(_ec.Object, "##[set-env name=foo]bar")); + Assert.True(commandManager.TryProcessCommand(_ec.Object, "##[stopToken]")); + Assert.True(commandManager.TryProcessCommand(_ec.Object, "##[set-env name=foo]bar")); + } + } + } +} diff --git a/src/Test/L0/Worker/ActionManagerL0.cs b/src/Test/L0/Worker/ActionManagerL0.cs new file mode 100644 index 00000000000..73c71fcc7bb --- /dev/null +++ b/src/Test/L0/Worker/ActionManagerL0.cs @@ -0,0 +1,1759 @@ +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Worker; +using GitHub.Runner.Worker.Container; +using Moq; +using System; +using System.Collections.Generic; +using System.IO; +using System.IO.Compression; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Xunit; +using Pipelines = GitHub.DistributedTask.Pipelines; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class ActionManagerL0 + { + private const string TestDataFolderName = "TestData"; + private CancellationTokenSource _ecTokenSource; + private Mock _configurationStore; + private Mock _dockerManager; + private Mock _ec; + private Mock _pluginManager; + private TestHostContext _hc; + private ActionManager _actionManager; + private string _workFolder; + + // //Test how exceptions are propagated to the caller. + // [Fact] + // [Trait("Level", "L0")] + // [Trait("Category", "Worker")] + // public async void RetryNetworkException() + // { + // try + // { + // // Arrange. 
+ // Setup(); + // var pingTask = new Pipelines.TaskStep() + // { + // Enabled = true, + // Reference = new Pipelines.TaskStepDefinitionReference() + // { + // Name = "Ping", + // Version = "0.1.1", + // Id = Guid.NewGuid() + // } + // }; + + // var pingVersion = new TaskVersion(pingTask.Reference.Version); + // Exception expectedException = new System.Net.Http.HttpRequestException("simulated network error"); + // _taskServer + // .Setup(x => x.GetTaskContentZipAsync(It.IsAny(), It.IsAny(), It.IsAny())) + // .Returns((Guid taskId, TaskVersion taskVersion, CancellationToken token) => + // { + // throw expectedException; + // }); + + // var tasks = new List(new Pipelines.TaskStep[] { pingTask }); + + // //Act + // Exception actualException = null; + // try + // { + // await _actionManager.DownloadAsync(_ec.Object, tasks); + // } + // catch (Exception ex) + // { + // actualException = ex; + // } + + // //Assert + // //verify task completed in less than 2sec and it is in failed state state + // Assert.Equal(expectedException, actualException); + + // //assert download was invoked 3 times, because we retry on task download + // _taskServer + // .Verify(x => x.GetTaskContentZipAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(3)); + + // //see if the task.json was not downloaded + // Assert.Equal( + // 0, + // Directory.GetFiles(_hc.GetDirectory(WellKnownDirectory.Tasks), "*", SearchOption.AllDirectories).Length); + // } + // finally + // { + // Teardown(); + // } + // } + + // //Test how exceptions are propagated to the caller. + // [Fact] + // [Trait("Level", "L0")] + // [Trait("Category", "Worker")] + // public async void RetryStreamException() + // { + // try + // { + // // Arrange. + // Setup(); + // var pingTask = new Pipelines.TaskStep() + // { + // Enabled = true, + // Reference = new Pipelines.TaskStepDefinitionReference() + // { + // Name = "Ping", + // Version = "0.1.1", + // Id = Guid.NewGuid() + // } + // }; + + // var pingVersion = new TaskVersion(pingTask.Reference.Version); + // Exception expectedException = new System.Net.Http.HttpRequestException("simulated network error"); + // _taskServer + // .Setup(x => x.GetTaskContentZipAsync(It.IsAny(), It.IsAny(), It.IsAny())) + // .Returns((Guid taskId, TaskVersion taskVersion, CancellationToken token) => + // { + // return Task.FromResult(new ExceptionStream()); + // }); + + // var tasks = new List(new Pipelines.TaskStep[] { pingTask }); + + // //Act + // Exception actualException = null; + // try + // { + // await _actionManager.DownloadAsync(_ec.Object, tasks); + // } + // catch (Exception ex) + // { + // actualException = ex; + // } + + // //Assert + // //verify task completed in less than 2sec and it is in failed state state + // Assert.Equal("NotImplementedException", actualException.GetType().Name); + + // //assert download was invoked 3 times, because we retry on task download + // _taskServer + // .Verify(x => x.GetTaskContentZipAsync(It.IsAny(), It.IsAny(), It.IsAny()), Times.Exactly(3)); + + // //see if the task.json was not downloaded + // Assert.Equal( + // 0, + // Directory.GetFiles(_hc.GetDirectory(WellKnownDirectory.Tasks), "*", SearchOption.AllDirectories).Length); + // } + // finally + // { + // Teardown(); + // } + // } + +#if OS_LINUX + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void PrepareActions_PullImageFromDockerHub() + { + try + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actions = new List + { + new Pipelines.ActionStep() + { + Name = "action", + 
Id = actionId, + Reference = new Pipelines.ContainerRegistryReference() + { + Image = "ubuntu:16.04" + } + } + }; + + //Act + var steps = await _actionManager.PrepareActionsAsync(_ec.Object, actions); + + //Assert + Assert.Equal((steps[0].Data as ContainerSetupInfo).StepIds[0], actionId); + Assert.Equal((steps[0].Data as ContainerSetupInfo).Container.Image, "ubuntu:16.04"); + } + finally + { + Teardown(); + } + } +#endif + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void PrepareActions_DownloadActionFromGraph() + { + try + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actions = new List + { + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "actions/npm", + Ref = "master", + RepositoryType = "GitHub" + } + } + }; + + //Act + await _actionManager.PrepareActionsAsync(_ec.Object, actions); + + //Assert + var watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "actions/npm", "master.completed"); + Assert.True(File.Exists(watermarkFile)); + + var actionDockerfile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "actions/npm", "master", "Dockerfile"); + Assert.True(File.Exists(actionDockerfile)); + _hc.GetTrace().Info(File.ReadAllText(actionDockerfile)); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void PrepareActions_SkipDownloadActionFromGraphWhenCache() + { + try + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actions = new List + { + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "notexist/no", + Ref = "notexist", + RepositoryType = "GitHub" + } + } + }; + + var watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "notexist/no", "notexist.completed"); + Directory.CreateDirectory(Path.GetDirectoryName(watermarkFile)); + File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist")); + File.Copy(Path.Combine(Environment.GetEnvironmentVariable("GITHUB_RUNNER_SRC_DIR"), "Test", TestDataFolderName, "dockerfileaction.yml"), Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist", "action.yml")); + + //Act + await _actionManager.PrepareActionsAsync(_ec.Object, actions); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void PrepareActions_SkipDownloadActionForSelfRepo() + { + try + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actions = new List + { + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.RepositoryPathReference() + { + Path = "action", + RepositoryType = Pipelines.PipelineConstants.SelfAlias + } + } + }; + + //Act + await _actionManager.PrepareActionsAsync(_ec.Object, actions); + } + finally + { + Teardown(); + } + } + +#if OS_LINUX + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void PrepareActions_RepositoryActionWithDockerfile() + { + try + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actions = new List + { + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "actions/test", + Ref = "master", + RepositoryType = "GitHub" + } + } + }; + + var 
watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "actions/test", "master.completed"); + Directory.CreateDirectory(Path.GetDirectoryName(watermarkFile)); + File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "master")); + File.WriteAllText(Path.Combine(Path.GetDirectoryName(watermarkFile), "master", "Dockerfile"), "Fake Dockerfile"); + + //Act + var steps = await _actionManager.PrepareActionsAsync(_ec.Object, actions); + Assert.Equal((steps[0].Data as ContainerSetupInfo).StepIds[0], actionId); + Assert.Equal((steps[0].Data as ContainerSetupInfo).Container.WorkingDirectory, Path.Combine(Path.GetDirectoryName(watermarkFile), "master")); + Assert.Equal((steps[0].Data as ContainerSetupInfo).Container.Dockerfile, Path.Combine(Path.GetDirectoryName(watermarkFile), "master", "Dockerfile")); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void PrepareActions_RepositoryActionWithDockerfileInRelativePath() + { + try + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actions = new List + { + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "actions/test", + Ref = "master", + Path = "images/cli", + RepositoryType = "GitHub" + } + } + }; + + var watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "actions/test", "master.completed"); + Directory.CreateDirectory(Path.GetDirectoryName(watermarkFile)); + File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "master/images/cli")); + File.WriteAllText(Path.Combine(Path.GetDirectoryName(watermarkFile), "master/images/cli/Dockerfile"), "Fake Dockerfile"); + + //Act + var steps = await _actionManager.PrepareActionsAsync(_ec.Object, actions); + + Assert.Equal((steps[0].Data as ContainerSetupInfo).StepIds[0], actionId); + Assert.Equal((steps[0].Data as ContainerSetupInfo).Container.WorkingDirectory, Path.Combine(Path.GetDirectoryName(watermarkFile), "master")); + Assert.Equal((steps[0].Data as ContainerSetupInfo).Container.Dockerfile, Path.Combine(Path.GetDirectoryName(watermarkFile), "master", "images/cli", "Dockerfile")); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void PrepareActions_RepositoryActionWithActionfile_Dockerfile() + { + try + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actions = new List + { + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "notexist/no", + Ref = "notexist", + RepositoryType = "GitHub" + } + } + }; + + var watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "notexist/no", "notexist.completed"); + Directory.CreateDirectory(Path.GetDirectoryName(watermarkFile)); + File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist")); + File.Copy(Path.Combine(Environment.GetEnvironmentVariable("GITHUB_RUNNER_SRC_DIR"), "Test", TestDataFolderName, "dockerfileaction.yml"), Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist", "action.yml")); + File.WriteAllText(Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist/Dockerfile"), "Fake 
Dockerfile"); + + //Act + var steps = await _actionManager.PrepareActionsAsync(_ec.Object, actions); + + Assert.Equal((steps[0].Data as ContainerSetupInfo).StepIds[0], actionId); + Assert.Equal((steps[0].Data as ContainerSetupInfo).Container.WorkingDirectory, Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist")); + Assert.Equal((steps[0].Data as ContainerSetupInfo).Container.Dockerfile, Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist", "Dockerfile")); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void PrepareActions_RepositoryActionWithActionfile_DockerfileRelativePath() + { + try + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actions = new List + { + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "notexist/no", + Ref = "notexist", + RepositoryType = "GitHub" + } + } + }; + + var watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "notexist/no", "notexist.completed"); + Directory.CreateDirectory(Path.GetDirectoryName(watermarkFile)); + File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist")); + File.Copy(Path.Combine(Environment.GetEnvironmentVariable("GITHUB_RUNNER_SRC_DIR"), "Test", TestDataFolderName, "dockerfilerelativeaction.yml"), Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist", "action.yml")); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "master/images")); + File.WriteAllText(Path.Combine(Path.GetDirectoryName(watermarkFile), "master/images/Dockerfile"), "Fake Dockerfile"); + + //Act + var steps = await _actionManager.PrepareActionsAsync(_ec.Object, actions); + + Assert.Equal((steps[0].Data as ContainerSetupInfo).StepIds[0], actionId); + Assert.Equal((steps[0].Data as ContainerSetupInfo).Container.WorkingDirectory, Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist")); + Assert.Equal((steps[0].Data as ContainerSetupInfo).Container.Dockerfile, Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist", "images/Dockerfile")); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void PrepareActions_RepositoryActionWithActionfile_DockerHubImage() + { + try + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actions = new List + { + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "notexist/no", + Ref = "notexist", + RepositoryType = "GitHub" + } + } + }; + + var watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "notexist/no", "notexist.completed"); + Directory.CreateDirectory(Path.GetDirectoryName(watermarkFile)); + File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist")); + File.Copy(Path.Combine(Environment.GetEnvironmentVariable("GITHUB_RUNNER_SRC_DIR"), "Test", TestDataFolderName, "dockerhubaction.yml"), Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist", "action.yml")); + + //Act + var steps = await _actionManager.PrepareActionsAsync(_ec.Object, actions); + + Assert.Equal((steps[0].Data as ContainerSetupInfo).StepIds[0], actionId); + Assert.Equal((steps[0].Data as ContainerSetupInfo).Container.Image, 
"ubuntu:18.04"); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void PrepareActions_RepositoryActionWithActionfileAndDockerfile() + { + try + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actions = new List + { + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "notexist/no", + Ref = "notexist", + RepositoryType = "GitHub" + } + } + }; + + var watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "notexist/no", "notexist.completed"); + Directory.CreateDirectory(Path.GetDirectoryName(watermarkFile)); + File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist")); + File.Copy(Path.Combine(Environment.GetEnvironmentVariable("GITHUB_RUNNER_SRC_DIR"), "Test", TestDataFolderName, "dockerhubaction.yml"), Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist", "action.yml")); + File.WriteAllText(Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist", "Dockerfile"), "Fake Dockerfile"); + + //Act + var steps = await _actionManager.PrepareActionsAsync(_ec.Object, actions); + + Assert.Equal((steps[0].Data as ContainerSetupInfo).StepIds[0], actionId); + Assert.Equal((steps[0].Data as ContainerSetupInfo).Container.Image, "ubuntu:18.04"); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void PrepareActions_NotPullOrBuildImagesMultipleTimes() + { + try + { + //Arrange + Setup(); + var actionId1 = Guid.NewGuid(); + var actionId2 = Guid.NewGuid(); + var actionId3 = Guid.NewGuid(); + var actionId4 = Guid.NewGuid(); + var actionId5 = Guid.NewGuid(); + var actionId6 = Guid.NewGuid(); + var actionId7 = Guid.NewGuid(); + var actionId8 = Guid.NewGuid(); + var actions = new List + { + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId1, + Reference = new Pipelines.ContainerRegistryReference() + { + Image = "ubuntu:16.04" + } + }, + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId2, + Reference = new Pipelines.ContainerRegistryReference() + { + Image = "ubuntu:18.04" + } + }, + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId3, + Reference = new Pipelines.ContainerRegistryReference() + { + Image = "ubuntu:18.04" + } + }, + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId4, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "notexist/no", + Ref = "notexist", + RepositoryType = "GitHub" + } + }, + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId5, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "actions/test", + Ref = "master", + RepositoryType = "GitHub" + } + }, + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId6, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "actions/test", + Ref = "release", + RepositoryType = "GitHub" + } + }, + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId7, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "actions/test", + Ref = "release", + RepositoryType = "GitHub" + } + }, + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId8, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "actions/test", + Ref = "master", + Path = "images/cli", + RepositoryType = "GitHub" + } + } + }; + + var watermarkFile = 
Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "notexist/no", "notexist.completed"); + Directory.CreateDirectory(Path.GetDirectoryName(watermarkFile)); + File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist")); + File.Copy(Path.Combine(Environment.GetEnvironmentVariable("GITHUB_RUNNER_SRC_DIR"), "Test", TestDataFolderName, "dockerhubaction.yml"), Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist", "action.yml")); + + watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "actions/test", "master.completed"); + Directory.CreateDirectory(Path.GetDirectoryName(watermarkFile)); + File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "master")); + File.WriteAllText(Path.Combine(Path.GetDirectoryName(watermarkFile), "master", "Dockerfile"), "Fake Dockerfile"); + + watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "actions/test", "release.completed"); + Directory.CreateDirectory(Path.GetDirectoryName(watermarkFile)); + File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "release")); + File.WriteAllText(Path.Combine(Path.GetDirectoryName(watermarkFile), "release", "Dockerfile"), "Fake Dockerfile"); + + watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "actions/test", "master.completed"); + Directory.CreateDirectory(Path.GetDirectoryName(watermarkFile)); + File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "master/images/cli")); + File.WriteAllText(Path.Combine(Path.GetDirectoryName(watermarkFile), "master/images/cli/Dockerfile"), "Fake Dockerfile"); + + //Act + var steps = await _actionManager.PrepareActionsAsync(_ec.Object, actions); + + //Assert + Assert.Equal((steps[0].Data as ContainerSetupInfo).StepIds[0], actionId1); + Assert.Equal((steps[0].Data as ContainerSetupInfo).Container.Image, "ubuntu:16.04"); + + Assert.True((steps[1].Data as ContainerSetupInfo).StepIds.Contains(actionId2)); + Assert.True((steps[1].Data as ContainerSetupInfo).StepIds.Contains(actionId3)); + Assert.True((steps[1].Data as ContainerSetupInfo).StepIds.Contains(actionId4)); + Assert.Equal((steps[1].Data as ContainerSetupInfo).Container.Image, "ubuntu:18.04"); + + Assert.Equal((steps[2].Data as ContainerSetupInfo).StepIds[0], actionId5); + Assert.Equal((steps[2].Data as ContainerSetupInfo).Container.WorkingDirectory, Path.Combine(Path.GetDirectoryName(watermarkFile), "master")); + Assert.Equal((steps[2].Data as ContainerSetupInfo).Container.Dockerfile, Path.Combine(Path.GetDirectoryName(watermarkFile), "master", "Dockerfile")); + + Assert.True((steps[3].Data as ContainerSetupInfo).StepIds.Contains(actionId6)); + Assert.True((steps[3].Data as ContainerSetupInfo).StepIds.Contains(actionId7)); + Assert.Equal((steps[3].Data as ContainerSetupInfo).Container.WorkingDirectory, Path.Combine(Path.GetDirectoryName(watermarkFile), "release")); + Assert.Equal((steps[3].Data as ContainerSetupInfo).Container.Dockerfile, Path.Combine(Path.GetDirectoryName(watermarkFile), "release", "Dockerfile")); + + Assert.Equal((steps[4].Data as ContainerSetupInfo).StepIds[0], actionId8); + Assert.Equal((steps[4].Data as ContainerSetupInfo).Container.WorkingDirectory, 
Path.Combine(Path.GetDirectoryName(watermarkFile), "master")); + Assert.Equal((steps[4].Data as ContainerSetupInfo).Container.Dockerfile, Path.Combine(Path.GetDirectoryName(watermarkFile), "master", "images/cli", "Dockerfile")); + } + finally + { + Teardown(); + } + } +#endif + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void PrepareActions_RepositoryActionWithActionfile_Node() + { + try + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actions = new List + { + new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.RepositoryPathReference() + { + Name = "notexist/no", + Ref = "notexist", + RepositoryType = "GitHub" + } + } + }; + + var watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "notexist/no", "notexist.completed"); + Directory.CreateDirectory(Path.GetDirectoryName(watermarkFile)); + File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString()); + Directory.CreateDirectory(Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist")); + File.Copy(Path.Combine(Environment.GetEnvironmentVariable("GITHUB_RUNNER_SRC_DIR"), "Test", TestDataFolderName, "nodeaction.yml"), Path.Combine(Path.GetDirectoryName(watermarkFile), "notexist", "action.yml")); + + //Act + await _actionManager.PrepareActionsAsync(_ec.Object, actions); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsContainerRegistryActionDefinition() + { + try + { + //Arrange + Setup(); + + Pipelines.ActionStep instance = new Pipelines.ActionStep() + { + Id = Guid.NewGuid(), + Reference = new Pipelines.ContainerRegistryReference() + { + Image = "ubuntu:16.04" + } + }; + + _actionManager.CachedActionContainers[instance.Id] = new ContainerInfo() { ContainerImage = "ubuntu:16.04" }; + + // Act. + Definition definition = _actionManager.LoadAction(_ec.Object, instance); + + // Assert. + Assert.NotNull(definition); + Assert.NotNull(definition.Data); + Assert.Equal("ubuntu:16.04", (definition.Data.Execution as ContainerActionExecutionData).Image); + Assert.True(string.IsNullOrEmpty((definition.Data.Execution as ContainerActionExecutionData).EntryPoint)); + Assert.Null((definition.Data.Execution as ContainerActionExecutionData).Arguments); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsScriptActionDefinition() + { + try + { + //Arrange + Setup(); + + Pipelines.ActionStep instance = new Pipelines.ActionStep() + { + Id = Guid.NewGuid(), + Reference = new Pipelines.ScriptReference() + }; + + // Act. + Definition definition = _actionManager.LoadAction(_ec.Object, instance); + + // Assert. + Assert.NotNull(definition); + Assert.NotNull(definition.Data); + Assert.True(definition.Data.Execution.ExecutionType == ActionExecutionType.Script); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsContainerActionDefinitionDockerfile() + { + try + { + // Arrange. + Setup(); + // Prepare the task.json content. + const string Content = @" +# Container action +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'GitHub' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' 
+ required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'docker' + image: 'Dockerfile' + args: + - '${{ inputs.greeting }}' + entrypoint: 'main.sh' + env: + Token: foo + Url: bar +"; + Pipelines.ActionStep instance; + string directory; + CreateAction(yamlContent: Content, instance: out instance, directory: out directory); + _actionManager.CachedActionContainers[instance.Id] = new ContainerInfo() { ContainerImage = "image:1234" }; + + // Act. + Definition definition = _actionManager.LoadAction(_ec.Object, instance); + + // Assert. + Assert.NotNull(definition); + Assert.Equal(directory, definition.Directory); + Assert.NotNull(definition.Data); + Assert.NotNull(definition.Data.Inputs); // inputs + + Dictionary inputDefaults = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var input in definition.Data.Inputs) + { + var name = input.Key.AssertString("key").Value; + var value = input.Value.AssertScalar("value").ToString(); + + _hc.GetTrace().Info($"Default: {name} = {value}"); + inputDefaults[name] = value; + } + + Assert.Equal(2, inputDefaults.Count); + Assert.True(inputDefaults.ContainsKey("greeting")); + Assert.Equal("Hello", inputDefaults["greeting"]); + Assert.True(string.IsNullOrEmpty(inputDefaults["entryPoint"])); + Assert.NotNull(definition.Data.Execution); // execution + + Assert.NotNull((definition.Data.Execution as ContainerActionExecutionData)); // execution.Node + Assert.Equal("image:1234", (definition.Data.Execution as ContainerActionExecutionData).Image); + Assert.Equal("main.sh", (definition.Data.Execution as ContainerActionExecutionData).EntryPoint); + + foreach (var arg in (definition.Data.Execution as ContainerActionExecutionData).Arguments) + { + Assert.Equal("${{ inputs.greeting }}", arg.AssertScalar("arg").ToString()); + } + + foreach (var env in (definition.Data.Execution as ContainerActionExecutionData).Environment) + { + var key = env.Key.AssertString("key").Value; + if (key == "Token") + { + Assert.Equal("foo", env.Value.AssertString("value").Value); + } + else if (key == "Url") + { + Assert.Equal("bar", env.Value.AssertString("value").Value); + } + else + { + throw new NotSupportedException(key); + } + } + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsContainerActionDefinitionRegistry() + { + try + { + // Arrange. + Setup(); + // Prepare the task.json content. + const string Content = @" +# Container action +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'GitHub' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' 
+ required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'docker' + image: 'docker://ubuntu:16.04' + args: + - '${{ inputs.greeting }}' + entrypoint: 'main.sh' + env: + Token: foo + Url: ${{inputs.greeting}} +"; + Pipelines.ActionStep instance; + string directory; + CreateAction(yamlContent: Content, instance: out instance, directory: out directory); + + _actionManager.CachedActionContainers[instance.Id] = new ContainerInfo() { ContainerImage = "ubuntu:16.04" }; + // Act. + Definition definition = _actionManager.LoadAction(_ec.Object, instance); + + // Assert. + Assert.NotNull(definition); + Assert.Equal(directory, definition.Directory); + Assert.NotNull(definition.Data); + Assert.NotNull(definition.Data.Inputs); // inputs + Dictionary inputDefaults = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var input in definition.Data.Inputs) + { + var name = input.Key.AssertString("key").Value; + var value = input.Value.AssertScalar("value").ToString(); + + _hc.GetTrace().Info($"Default: {name} = {value}"); + inputDefaults[name] = value; + } + + Assert.Equal(2, inputDefaults.Count); + Assert.True(inputDefaults.ContainsKey("greeting")); + Assert.Equal("hello", inputDefaults["greeting"]); + Assert.True(string.IsNullOrEmpty(inputDefaults["entryPoint"])); + Assert.NotNull(definition.Data.Execution); // execution + + Assert.NotNull((definition.Data.Execution as ContainerActionExecutionData)); + Assert.Equal("ubuntu:16.04", (definition.Data.Execution as ContainerActionExecutionData).Image); + Assert.Equal("main.sh", (definition.Data.Execution as ContainerActionExecutionData).EntryPoint); + + foreach (var arg in (definition.Data.Execution as ContainerActionExecutionData).Arguments) + { + Assert.Equal("${{ inputs.greeting }}", arg.AssertScalar("arg").ToString()); + } + + foreach (var env in (definition.Data.Execution as ContainerActionExecutionData).Environment) + { + var key = env.Key.AssertString("key").Value; + if (key == "Token") + { + Assert.Equal("foo", env.Value.AssertString("value").Value); + } + else if (key == "Url") + { + Assert.Equal("${{ inputs.greeting }}", env.Value.AssertScalar("value").ToString()); + } + else + { + throw new NotSupportedException(key); + } + } + + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsNodeActionDefinition() + { + try + { + // Arrange. + Setup(); + const string Content = @" +# Container action +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'GitHub' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'node12' + main: 'task.js' +"; + Pipelines.ActionStep instance; + string directory; + CreateAction(yamlContent: Content, instance: out instance, directory: out directory); + + // Act. + Definition definition = _actionManager.LoadAction(_ec.Object, instance); + + // Assert. 
+ Assert.NotNull(definition); + Assert.Equal(directory, definition.Directory); + Assert.NotNull(definition.Data); + Assert.NotNull(definition.Data.Inputs); // inputs + Dictionary inputDefaults = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var input in definition.Data.Inputs) + { + var name = input.Key.AssertString("key").Value; + var value = input.Value.AssertScalar("value").ToString(); + + _hc.GetTrace().Info($"Default: {name} = {value}"); + inputDefaults[name] = value; + } + + Assert.Equal(2, inputDefaults.Count); + Assert.True(inputDefaults.ContainsKey("greeting")); + Assert.Equal("Hello", inputDefaults["greeting"]); + Assert.True(string.IsNullOrEmpty(inputDefaults["entryPoint"])); + Assert.NotNull(definition.Data.Execution); // execution + + Assert.NotNull((definition.Data.Execution as NodeJSActionExecutionData)); + Assert.Equal("task.js", (definition.Data.Execution as NodeJSActionExecutionData).Script); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsContainerActionDefinitionDockerfile_SelfRepo() + { + try + { + // Arrange. + Setup(); + // Prepare the task.json content. + const string Content = @" +# Container action +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'GitHub' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'docker' + image: 'Dockerfile' + args: + - '${{ inputs.greeting }}' + entrypoint: 'main.sh' + env: + Token: foo + Url: bar +"; + Pipelines.ActionStep instance; + string directory; + CreateSelfRepoAction(yamlContent: Content, instance: out instance, directory: out directory); + + // Act. + Definition definition = _actionManager.LoadAction(_ec.Object, instance); + + // Assert. 
+ Assert.NotNull(definition); + Assert.Equal(directory, definition.Directory); + Assert.NotNull(definition.Data); + Assert.NotNull(definition.Data.Inputs); // inputs + + Dictionary inputDefaults = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var input in definition.Data.Inputs) + { + var name = input.Key.AssertString("key").Value; + var value = input.Value.AssertScalar("value").ToString(); + + _hc.GetTrace().Info($"Default: {name} = {value}"); + inputDefaults[name] = value; + } + + Assert.Equal(2, inputDefaults.Count); + Assert.True(inputDefaults.ContainsKey("greeting")); + Assert.Equal("Hello", inputDefaults["greeting"]); + Assert.True(string.IsNullOrEmpty(inputDefaults["entryPoint"])); + Assert.NotNull(definition.Data.Execution); // execution + + Assert.NotNull((definition.Data.Execution as ContainerActionExecutionData)); // execution.Node + Assert.Equal("Dockerfile", (definition.Data.Execution as ContainerActionExecutionData).Image); + Assert.Equal("main.sh", (definition.Data.Execution as ContainerActionExecutionData).EntryPoint); + + foreach (var arg in (definition.Data.Execution as ContainerActionExecutionData).Arguments) + { + Assert.Equal("${{ inputs.greeting }}", arg.AssertScalar("arg").ToString()); + } + + foreach (var env in (definition.Data.Execution as ContainerActionExecutionData).Environment) + { + var key = env.Key.AssertString("key").Value; + if (key == "Token") + { + Assert.Equal("foo", env.Value.AssertString("value").Value); + } + else if (key == "Url") + { + Assert.Equal("bar", env.Value.AssertString("value").Value); + } + else + { + throw new NotSupportedException(key); + } + } + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsContainerActionDefinitionRegistry_SelfRepo() + { + try + { + // Arrange. + Setup(); + // Prepare the task.json content. + const string Content = @" +# Container action +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'GitHub' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'docker' + image: 'docker://ubuntu:16.04' + args: + - '${{ inputs.greeting }}' + entrypoint: 'main.sh' + env: + Token: foo + Url: ${{inputs.greeting}} +"; + Pipelines.ActionStep instance; + string directory; + CreateSelfRepoAction(yamlContent: Content, instance: out instance, directory: out directory); + + // Act. + Definition definition = _actionManager.LoadAction(_ec.Object, instance); + + // Assert. 
+ Assert.NotNull(definition); + Assert.Equal(directory, definition.Directory); + Assert.NotNull(definition.Data); + Assert.NotNull(definition.Data.Inputs); // inputs + Dictionary inputDefaults = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var input in definition.Data.Inputs) + { + var name = input.Key.AssertString("key").Value; + var value = input.Value.AssertScalar("value").ToString(); + + _hc.GetTrace().Info($"Default: {name} = {value}"); + inputDefaults[name] = value; + } + + Assert.Equal(2, inputDefaults.Count); + Assert.True(inputDefaults.ContainsKey("greeting")); + Assert.Equal("hello", inputDefaults["greeting"]); + Assert.True(string.IsNullOrEmpty(inputDefaults["entryPoint"])); + Assert.NotNull(definition.Data.Execution); // execution + + Assert.NotNull((definition.Data.Execution as ContainerActionExecutionData)); + Assert.Equal("docker://ubuntu:16.04", (definition.Data.Execution as ContainerActionExecutionData).Image); + Assert.Equal("main.sh", (definition.Data.Execution as ContainerActionExecutionData).EntryPoint); + + foreach (var arg in (definition.Data.Execution as ContainerActionExecutionData).Arguments) + { + Assert.Equal("${{ inputs.greeting }}", arg.AssertScalar("arg").ToString()); + } + + foreach (var env in (definition.Data.Execution as ContainerActionExecutionData).Environment) + { + var key = env.Key.AssertString("key").Value; + if (key == "Token") + { + Assert.Equal("foo", env.Value.AssertString("value").Value); + } + else if (key == "Url") + { + Assert.Equal("${{ inputs.greeting }}", env.Value.AssertScalar("value").ToString()); + } + else + { + throw new NotSupportedException(key); + } + } + + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsNodeActionDefinition_SelfRepo() + { + try + { + // Arrange. + Setup(); + const string Content = @" +# Container action +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'GitHub' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'node12' + main: 'task.js' +"; + Pipelines.ActionStep instance; + string directory; + CreateSelfRepoAction(yamlContent: Content, instance: out instance, directory: out directory); + + // Act. + Definition definition = _actionManager.LoadAction(_ec.Object, instance); + + // Assert. 
+ Assert.NotNull(definition); + Assert.Equal(directory, definition.Directory); + Assert.NotNull(definition.Data); + Assert.NotNull(definition.Data.Inputs); // inputs + Dictionary inputDefaults = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var input in definition.Data.Inputs) + { + var name = input.Key.AssertString("key").Value; + var value = input.Value.AssertScalar("value").ToString(); + + _hc.GetTrace().Info($"Default: {name} = {value}"); + inputDefaults[name] = value; + } + + Assert.Equal(2, inputDefaults.Count); + Assert.True(inputDefaults.ContainsKey("greeting")); + Assert.Equal("Hello", inputDefaults["greeting"]); + Assert.True(string.IsNullOrEmpty(inputDefaults["entryPoint"])); + Assert.NotNull(definition.Data.Execution); // execution + + Assert.NotNull((definition.Data.Execution as NodeJSActionExecutionData)); + Assert.Equal("task.js", (definition.Data.Execution as NodeJSActionExecutionData).Script); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsNodeActionDefinition_Cleanup() + { + try + { + // Arrange. + Setup(); + const string Content = @" +# Container action +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'GitHub' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'node12' + main: 'task.js' + post: 'cleanup.js' +"; + Pipelines.ActionStep instance; + string directory; + CreateAction(yamlContent: Content, instance: out instance, directory: out directory); + + // Act. + Definition definition = _actionManager.LoadAction(_ec.Object, instance); + + // Assert. + Assert.NotNull(definition); + Assert.Equal(directory, definition.Directory); + Assert.NotNull(definition.Data); + Assert.NotNull(definition.Data.Inputs); // inputs + Dictionary inputDefaults = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var input in definition.Data.Inputs) + { + var name = input.Key.AssertString("key").Value; + var value = input.Value.AssertScalar("value").ToString(); + + _hc.GetTrace().Info($"Default: {name} = {value}"); + inputDefaults[name] = value; + } + + Assert.Equal(2, inputDefaults.Count); + Assert.True(inputDefaults.ContainsKey("greeting")); + Assert.Equal("Hello", inputDefaults["greeting"]); + Assert.True(string.IsNullOrEmpty(inputDefaults["entryPoint"])); + Assert.NotNull(definition.Data.Execution); // execution + + Assert.NotNull((definition.Data.Execution as NodeJSActionExecutionData)); + Assert.Equal("task.js", (definition.Data.Execution as NodeJSActionExecutionData).Script); + Assert.Equal("cleanup.js", (definition.Data.Execution as NodeJSActionExecutionData).Cleanup); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsContainerActionDefinitionDockerfile_Cleanup() + { + try + { + // Arrange. + Setup(); + // Prepare the task.json content. 
+ const string Content = @" +# Container action +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'GitHub' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'docker' + image: 'Dockerfile' + args: + - '${{ inputs.greeting }}' + entrypoint: 'main.sh' + env: + Token: foo + Url: bar + post-entrypoint: 'cleanup.sh' +"; + Pipelines.ActionStep instance; + string directory; + CreateAction(yamlContent: Content, instance: out instance, directory: out directory); + _actionManager.CachedActionContainers[instance.Id] = new ContainerInfo() { ContainerImage = "image:1234" }; + + // Act. + Definition definition = _actionManager.LoadAction(_ec.Object, instance); + + // Assert. + Assert.NotNull(definition); + Assert.Equal(directory, definition.Directory); + Assert.NotNull(definition.Data); + Assert.NotNull(definition.Data.Inputs); // inputs + + Dictionary inputDefaults = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var input in definition.Data.Inputs) + { + var name = input.Key.AssertString("key").Value; + var value = input.Value.AssertScalar("value").ToString(); + + _hc.GetTrace().Info($"Default: {name} = {value}"); + inputDefaults[name] = value; + } + + Assert.Equal(2, inputDefaults.Count); + Assert.True(inputDefaults.ContainsKey("greeting")); + Assert.Equal("Hello", inputDefaults["greeting"]); + Assert.True(string.IsNullOrEmpty(inputDefaults["entryPoint"])); + Assert.NotNull(definition.Data.Execution); // execution + + Assert.NotNull((definition.Data.Execution as ContainerActionExecutionData)); // execution.Node + Assert.Equal("image:1234", (definition.Data.Execution as ContainerActionExecutionData).Image); + Assert.Equal("main.sh", (definition.Data.Execution as ContainerActionExecutionData).EntryPoint); + Assert.Equal("cleanup.sh", (definition.Data.Execution as ContainerActionExecutionData).Cleanup); + + foreach (var arg in (definition.Data.Execution as ContainerActionExecutionData).Arguments) + { + Assert.Equal("${{ inputs.greeting }}", arg.AssertScalar("arg").ToString()); + } + + foreach (var env in (definition.Data.Execution as ContainerActionExecutionData).Environment) + { + var key = env.Key.AssertString("key").Value; + if (key == "Token") + { + Assert.Equal("foo", env.Value.AssertString("value").Value); + } + else if (key == "Url") + { + Assert.Equal("bar", env.Value.AssertString("value").Value); + } + else + { + throw new NotSupportedException(key); + } + } + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsPluginActionDefinition() + { + try + { + // Arrange. + Setup(); + const string Content = @" +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'Test Corporation' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' 
+ required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + plugin: 'someplugin' +"; + Pipelines.ActionStep instance; + string directory; + CreateAction(yamlContent: Content, instance: out instance, directory: out directory); + + // Act. + Definition definition = _actionManager.LoadAction(_ec.Object, instance); + + // Assert. + Assert.NotNull(definition); + Assert.Equal(directory, definition.Directory); + Assert.NotNull(definition.Data); + Assert.NotNull(definition.Data.Inputs); // inputs + Dictionary inputDefaults = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var input in definition.Data.Inputs) + { + var name = input.Key.AssertString("key").Value; + var value = input.Value.AssertScalar("value").ToString(); + + _hc.GetTrace().Info($"Default: {name} = {value}"); + inputDefaults[name] = value; + } + + Assert.Equal(2, inputDefaults.Count); + Assert.True(inputDefaults.ContainsKey("greeting")); + Assert.Equal("Hello", inputDefaults["greeting"]); + Assert.True(string.IsNullOrEmpty(inputDefaults["entryPoint"])); + Assert.NotNull(definition.Data.Execution); // execution + + Assert.NotNull((definition.Data.Execution as PluginActionExecutionData)); + Assert.Equal("plugin.class, plugin", (definition.Data.Execution as PluginActionExecutionData).Plugin); + Assert.Equal("plugin.cleanup, plugin", (definition.Data.Execution as PluginActionExecutionData).Cleanup); + } + finally + { + Teardown(); + } + } + + private void CreateAction(string yamlContent, out Pipelines.ActionStep instance, out string directory) + { + directory = Path.Combine(_workFolder, Constants.Path.ActionsDirectory, "GitHub/actions".Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), "master"); + string file = Path.Combine(directory, Constants.Path.ActionManifestFile); + Directory.CreateDirectory(Path.GetDirectoryName(file)); + File.WriteAllText(file, yamlContent); + instance = new Pipelines.ActionStep() + { + Id = Guid.NewGuid(), + Reference = new Pipelines.RepositoryPathReference() + { + Name = "GitHub/actions", + Ref = "master", + RepositoryType = Pipelines.RepositoryTypes.GitHub + } + }; + } + + private void CreateSelfRepoAction(string yamlContent, out Pipelines.ActionStep instance, out string directory) + { + directory = Path.Combine(_workFolder, "actions", "actions"); + string file = Path.Combine(directory, Constants.Path.ActionManifestFile); + Directory.CreateDirectory(Path.GetDirectoryName(file)); + File.WriteAllText(file, yamlContent); + instance = new Pipelines.ActionStep() + { + Id = Guid.NewGuid(), + Reference = new Pipelines.RepositoryPathReference() + { + Name = "GitHub/actions", + Ref = "master", + RepositoryType = Pipelines.PipelineConstants.SelfAlias + } + }; + } + + + private void Setup([CallerMemberName] string name = "") + { + _ecTokenSource?.Dispose(); + _ecTokenSource = new CancellationTokenSource(); + + // Test host context. + _hc = new TestHostContext(this, name); + + // Random work folder. 
+ _workFolder = _hc.GetDirectory(WellKnownDirectory.Work); + + _ec = new Mock(); + _ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token); + _ec.Setup(x => x.Variables).Returns(new Variables(_hc, new Dictionary())); + _ec.Setup(x => x.Write(It.IsAny(), It.IsAny())).Callback((string tag, string message) => { _hc.GetTrace().Info($"[{tag}]{message}"); }); + _ec.Setup(x => x.AddIssue(It.IsAny(), It.IsAny())).Callback((Issue issue, string message) => { _hc.GetTrace().Info($"[{issue.Type}]{issue.Message ?? message}"); }); + _ec.Setup(x => x.GetGitHubContext("workspace")).Returns(Path.Combine(_workFolder, "actions", "actions")); + + _dockerManager = new Mock(); + _dockerManager.Setup(x => x.DockerPull(_ec.Object, "ubuntu:16.04")).Returns(Task.FromResult(0)); + _dockerManager.Setup(x => x.DockerPull(_ec.Object, "ubuntu:100.04")).Returns(Task.FromResult(1)); + + _dockerManager.Setup(x => x.DockerBuild(_ec.Object, It.IsAny(), It.IsAny(), It.IsAny())).Returns(Task.FromResult(0)); + + _pluginManager = new Mock(); + _pluginManager.Setup(x => x.GetPluginAction(It.IsAny())).Returns(new RunnerPluginActionInfo() { PluginTypeName = "plugin.class, plugin", PostPluginTypeName = "plugin.cleanup, plugin" }); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + _hc.SetSingleton(_dockerManager.Object); + _hc.SetSingleton(_pluginManager.Object); + _hc.SetSingleton(actionManifest); + + var proxy = new RunnerWebProxy(); + proxy.Initialize(_hc); + _hc.SetSingleton(proxy); + + _configurationStore = new Mock(); + _configurationStore + .Setup(x => x.GetSettings()) + .Returns( + new RunnerSettings + { + WorkFolder = _workFolder + }); + _hc.SetSingleton(_configurationStore.Object); + + var pInvoker = new ProcessInvokerWrapper(); + pInvoker.Initialize(_hc); + _hc.EnqueueInstance(pInvoker); + + // Instance to test. 
+ _actionManager = new ActionManager(); + _actionManager.Initialize(_hc); + + Environment.SetEnvironmentVariable("GITHUB_ACTION_DOWNLOAD_NO_BACKOFF", "1"); + } + + private void Teardown() + { + _hc?.Dispose(); + if (!string.IsNullOrEmpty(_workFolder) && Directory.Exists(_workFolder)) + { + Directory.Delete(_workFolder, recursive: true); + } + } + + private class ExceptionStream : Stream + { + public override bool CanRead => throw new NotImplementedException(); + + public override bool CanSeek => throw new NotImplementedException(); + + public override bool CanWrite => throw new NotImplementedException(); + + public override long Length => throw new NotImplementedException(); + + public override long Position { get => throw new NotImplementedException(); set => throw new NotImplementedException(); } + + public override void Flush() + { + throw new NotImplementedException(); + } + + public override int Read(byte[] buffer, int offset, int count) + { + throw new NotImplementedException(); + } + + public override long Seek(long offset, SeekOrigin origin) + { + throw new NotImplementedException(); + } + + public override void SetLength(long value) + { + throw new NotImplementedException(); + } + + public override void Write(byte[] buffer, int offset, int count) + { + throw new NotImplementedException(); + } + } + } +} diff --git a/src/Test/L0/Worker/ActionManifestManagerL0.cs b/src/Test/L0/Worker/ActionManifestManagerL0.cs new file mode 100644 index 00000000000..821e969f751 --- /dev/null +++ b/src/Test/L0/Worker/ActionManifestManagerL0.cs @@ -0,0 +1,497 @@ +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Worker; +using Moq; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Runtime.CompilerServices; +using System.Threading; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class ActionManifestManagerL0 + { + private CancellationTokenSource _ecTokenSource; + private Mock _ec; + private TestHostContext _hc; + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Load_ContainerAction_Dockerfile() + { + try + { + //Arrange + Setup(); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + //Act + var result = actionManifest.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerfileaction.yml")); + + //Assert + + Assert.Equal(result.Name, "Hello World"); + Assert.Equal(result.Description, "Greet the world and record the time"); + Assert.Equal(result.Inputs.Count, 2); + Assert.Equal(result.Inputs[0].Key.AssertString("key").Value, "greeting"); + Assert.Equal(result.Inputs[0].Value.AssertString("value").Value, "Hello"); + Assert.Equal(result.Inputs[1].Key.AssertString("key").Value, "entryPoint"); + Assert.Equal(result.Inputs[1].Value.AssertString("value").Value, ""); + + Assert.Equal(result.Execution.ExecutionType, ActionExecutionType.Container); + + var containerAction = result.Execution as ContainerActionExecutionData; + + Assert.Equal(containerAction.Image, "Dockerfile"); + Assert.Equal(containerAction.EntryPoint, "main.sh"); + Assert.Equal(containerAction.Arguments[0].ToString(), "bzz"); + Assert.Equal(containerAction.Environment[0].Key.ToString(), "Token"); + Assert.Equal(containerAction.Environment[0].Value.ToString(), "foo"); + Assert.Equal(containerAction.Environment[1].Key.ToString(), "Url"); + 
Assert.Equal(containerAction.Environment[1].Value.ToString(), "bar"); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Load_ContainerAction_Dockerfile_Post() + { + try + { + //Arrange + Setup(); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + //Act + var result = actionManifest.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerfileaction_cleanup.yml")); + + //Assert + + Assert.Equal(result.Name, "Hello World"); + Assert.Equal(result.Description, "Greet the world and record the time"); + Assert.Equal(result.Inputs.Count, 2); + Assert.Equal(result.Inputs[0].Key.AssertString("key").Value, "greeting"); + Assert.Equal(result.Inputs[0].Value.AssertString("value").Value, "Hello"); + Assert.Equal(result.Inputs[1].Key.AssertString("key").Value, "entryPoint"); + Assert.Equal(result.Inputs[1].Value.AssertString("value").Value, ""); + + Assert.Equal(result.Execution.ExecutionType, ActionExecutionType.Container); + + var containerAction = result.Execution as ContainerActionExecutionData; + + Assert.Equal(containerAction.Image, "Dockerfile"); + Assert.Equal(containerAction.EntryPoint, "main.sh"); + Assert.Equal(containerAction.Cleanup, "cleanup.sh"); + Assert.Equal(containerAction.CleanupCondition, "failure()"); + Assert.Equal(containerAction.Arguments[0].ToString(), "bzz"); + Assert.Equal(containerAction.Environment[0].Key.ToString(), "Token"); + Assert.Equal(containerAction.Environment[0].Value.ToString(), "foo"); + Assert.Equal(containerAction.Environment[1].Key.ToString(), "Url"); + Assert.Equal(containerAction.Environment[1].Value.ToString(), "bar"); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Load_ContainerAction_NoArgsNoEnv() + { + try + { + //Arrange + Setup(); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + //Act + var result = actionManifest.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerfileaction_noargs_noenv_noentrypoint.yml")); + + //Assert + Assert.Equal(result.Name, "Hello World"); + Assert.Equal(result.Description, "Greet the world and record the time"); + Assert.Equal(result.Inputs.Count, 2); + Assert.Equal(result.Inputs[0].Key.AssertString("key").Value, "greeting"); + Assert.Equal(result.Inputs[0].Value.AssertString("value").Value, "Hello"); + Assert.Equal(result.Inputs[1].Key.AssertString("key").Value, "entryPoint"); + Assert.Equal(result.Inputs[1].Value.AssertString("value").Value, ""); + + Assert.Equal(result.Execution.ExecutionType, ActionExecutionType.Container); + + var containerAction = result.Execution as ContainerActionExecutionData; + + Assert.Equal(containerAction.Image, "Dockerfile"); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Load_ContainerAction_Dockerfile_Expression() + { + try + { + //Arrange + Setup(); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + //Act + var result = actionManifest.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerfileaction_arg_env_expression.yml")); + + //Assert + + Assert.Equal(result.Name, "Hello World"); + Assert.Equal(result.Description, "Greet the world and record the time"); + Assert.Equal(result.Inputs.Count, 2); + Assert.Equal(result.Inputs[0].Key.AssertString("key").Value, "greeting"); + 
Assert.Equal(result.Inputs[0].Value.AssertString("value").Value, "Hello"); + Assert.Equal(result.Inputs[1].Key.AssertString("key").Value, "entryPoint"); + Assert.Equal(result.Inputs[1].Value.AssertString("value").Value, ""); + + Assert.Equal(result.Execution.ExecutionType, ActionExecutionType.Container); + + var containerAction = result.Execution as ContainerActionExecutionData; + + Assert.Equal(containerAction.Image, "Dockerfile"); + Assert.Equal(containerAction.EntryPoint, "main.sh"); + Assert.Equal(containerAction.Arguments[0].ToString(), "${{ inputs.greeting }}"); + Assert.Equal(containerAction.Environment[0].Key.ToString(), "Token"); + Assert.Equal(containerAction.Environment[0].Value.ToString(), "foo"); + Assert.Equal(containerAction.Environment[1].Key.ToString(), "Url"); + Assert.Equal(containerAction.Environment[1].Value.ToString(), "${{ inputs.entryPoint }}"); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Load_ContainerAction_DockerHub() + { + try + { + //Arrange + Setup(); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + //Act + var result = actionManifest.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "dockerhubaction.yml")); + + //Assert + Assert.Equal(result.Name, "Hello World"); + Assert.Equal(result.Description, "Greet the world and record the time"); + Assert.Equal(result.Inputs.Count, 2); + Assert.Equal(result.Inputs[0].Key.AssertString("key").Value, "greeting"); + Assert.Equal(result.Inputs[0].Value.AssertString("value").Value, "Hello"); + Assert.Equal(result.Inputs[1].Key.AssertString("key").Value, "entryPoint"); + Assert.Equal(result.Inputs[1].Value.AssertString("value").Value, ""); + + Assert.Equal(result.Execution.ExecutionType, ActionExecutionType.Container); + + var containerAction = result.Execution as ContainerActionExecutionData; + + Assert.Equal(containerAction.Image, "docker://ubuntu:18.04"); + Assert.Equal(containerAction.EntryPoint, "main.sh"); + Assert.Equal(containerAction.Arguments[0].ToString(), "bzz"); + Assert.Equal(containerAction.Environment[0].Key.ToString(), "Token"); + Assert.Equal(containerAction.Environment[0].Value.ToString(), "foo"); + Assert.Equal(containerAction.Environment[1].Key.ToString(), "Url"); + Assert.Equal(containerAction.Environment[1].Value.ToString(), "bar"); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Load_NodeAction() + { + try + { + //Arrange + Setup(); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + //Act + var result = actionManifest.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "nodeaction.yml")); + + //Assert + Assert.Equal(result.Name, "Hello World"); + Assert.Equal(result.Description, "Greet the world and record the time"); + Assert.Equal(result.Inputs.Count, 2); + Assert.Equal(result.Inputs[0].Key.AssertString("key").Value, "greeting"); + Assert.Equal(result.Inputs[0].Value.AssertString("value").Value, "Hello"); + Assert.Equal(result.Inputs[1].Key.AssertString("key").Value, "entryPoint"); + Assert.Equal(result.Inputs[1].Value.AssertString("value").Value, ""); + Assert.Equal(result.Deprecated.Count, 1); + + Assert.True(result.Deprecated.ContainsKey("greeting")); + result.Deprecated.TryGetValue("greeting", out string value); + Assert.Equal(value, "This property has been deprecated"); + + Assert.Equal(result.Execution.ExecutionType, ActionExecutionType.NodeJS); + + 
var nodeAction = result.Execution as NodeJSActionExecutionData; + + Assert.Equal(nodeAction.Script, "main.js"); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Load_NodeAction_Cleanup() + { + try + { + //Arrange + Setup(); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + //Act + var result = actionManifest.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "nodeaction_cleanup.yml")); + + //Assert + Assert.Equal(result.Name, "Hello World"); + Assert.Equal(result.Description, "Greet the world and record the time"); + Assert.Equal(result.Inputs.Count, 2); + Assert.Equal(result.Inputs[0].Key.AssertString("key").Value, "greeting"); + Assert.Equal(result.Inputs[0].Value.AssertString("value").Value, "Hello"); + Assert.Equal(result.Inputs[1].Key.AssertString("key").Value, "entryPoint"); + Assert.Equal(result.Inputs[1].Value.AssertString("value").Value, ""); + Assert.Equal(result.Deprecated.Count, 1); + + Assert.True(result.Deprecated.ContainsKey("greeting")); + result.Deprecated.TryGetValue("greeting", out string value); + Assert.Equal(value, "This property has been deprecated"); + + Assert.Equal(result.Execution.ExecutionType, ActionExecutionType.NodeJS); + + var nodeAction = result.Execution as NodeJSActionExecutionData; + + Assert.Equal(nodeAction.Script, "main.js"); + Assert.Equal(nodeAction.Cleanup, "cleanup.js"); + Assert.Equal(nodeAction.CleanupCondition, "cancelled()"); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Load_PluginAction() + { + try + { + //Arrange + Setup(); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + //Act + var result = actionManifest.Load(_ec.Object, Path.Combine(TestUtil.GetTestDataPath(), "pluginaction.yml")); + + //Assert + Assert.Equal(result.Name, "Hello World"); + Assert.Equal(result.Description, "Greet the world and record the time"); + Assert.Equal(result.Inputs.Count, 2); + Assert.Equal(result.Inputs[0].Key.AssertString("key").Value, "greeting"); + Assert.Equal(result.Inputs[0].Value.AssertString("value").Value, "Hello"); + Assert.Equal(result.Inputs[1].Key.AssertString("key").Value, "entryPoint"); + Assert.Equal(result.Inputs[1].Value.AssertString("value").Value, ""); + + Assert.Equal(result.Execution.ExecutionType, ActionExecutionType.Plugin); + + var pluginAction = result.Execution as PluginActionExecutionData; + + Assert.Equal(pluginAction.Plugin, "someplugin"); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Evaluate_ContainerAction_Args() + { + try + { + //Arrange + Setup(); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + var arguments = new SequenceToken(null, null, null); + arguments.Add(new BasicExpressionToken(null, null, null, "inputs.greeting")); + arguments.Add(new StringToken(null, null, null, "test")); + + var inputsContext = new DictionaryContextData(); + inputsContext.Add("greeting", new StringContextData("hello")); + + var evaluateContext = new Dictionary(StringComparer.OrdinalIgnoreCase); + evaluateContext["inputs"] = inputsContext; + //Act + + var result = actionManifest.EvaluateContainerArguments(_ec.Object, arguments, evaluateContext); + + //Assert + Assert.Equal(result[0], "hello"); + Assert.Equal(result[1], "test"); + Assert.Equal(result.Count, 2); + } + finally + { + Teardown(); + 
} + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Evaluate_ContainerAction_Env() + { + try + { + //Arrange + Setup(); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + var environment = new MappingToken(null, null, null); + environment.Add(new StringToken(null, null, null, "hello"), new BasicExpressionToken(null, null, null, "inputs.greeting")); + environment.Add(new StringToken(null, null, null, "test"), new StringToken(null, null, null, "test")); + + var inputsContext = new DictionaryContextData(); + inputsContext.Add("greeting", new StringContextData("hello")); + + var evaluateContext = new Dictionary(StringComparer.OrdinalIgnoreCase); + evaluateContext["inputs"] = inputsContext; + + //Act + var result = actionManifest.EvaluateContainerEnvironment(_ec.Object, environment, evaluateContext); + + //Assert + Assert.Equal(result["hello"], "hello"); + Assert.Equal(result["test"], "test"); + Assert.Equal(result.Count, 2); + } + finally + { + Teardown(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Evaluate_Default_Input() + { + try + { + //Arrange + Setup(); + + var actionManifest = new ActionManifestManager(); + actionManifest.Initialize(_hc); + + var githubContext = new DictionaryContextData(); + githubContext.Add("ref", new StringContextData("refs/heads/master")); + + var evaluateContext = new Dictionary(StringComparer.OrdinalIgnoreCase); + evaluateContext["github"] = githubContext; + evaluateContext["strategy"] = new DictionaryContextData(); + evaluateContext["matrix"] = new DictionaryContextData(); + evaluateContext["steps"] = new DictionaryContextData(); + evaluateContext["job"] = new DictionaryContextData(); + evaluateContext["runner"] = new DictionaryContextData(); + evaluateContext["env"] = new DictionaryContextData(); + + //Act + var result = actionManifest.EvaluateDefaultInput(_ec.Object, "testInput", new StringToken(null, null, null, "defaultValue"), evaluateContext); + + //Assert + Assert.Equal(result, "defaultValue"); + + //Act + result = actionManifest.EvaluateDefaultInput(_ec.Object, "testInput", new BasicExpressionToken(null, null, null, "github.ref"), evaluateContext); + + //Assert + Assert.Equal(result, "refs/heads/master"); + } + finally + { + Teardown(); + } + } + + private void Setup([CallerMemberName] string name = "") + { + _ecTokenSource?.Dispose(); + _ecTokenSource = new CancellationTokenSource(); + + // Test host context. + _hc = new TestHostContext(this, name); + + _ec = new Mock(); + _ec.Setup(x => x.WriteDebug).Returns(true); + _ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token); + _ec.Setup(x => x.Variables).Returns(new Variables(_hc, new Dictionary())); + _ec.Setup(x => x.Write(It.IsAny(), It.IsAny())).Callback((string tag, string message) => { _hc.GetTrace().Info($"{tag}{message}"); }); + _ec.Setup(x => x.AddIssue(It.IsAny(), It.IsAny())).Callback((Issue issue, string message) => { _hc.GetTrace().Info($"[{issue.Type}]{issue.Message ?? 
message}"); }); + } + + private void Teardown() + { + _hc?.Dispose(); + } + } +} diff --git a/src/Test/L0/Worker/ActionRunnerL0.cs b/src/Test/L0/Worker/ActionRunnerL0.cs new file mode 100644 index 00000000000..141a5cd461f --- /dev/null +++ b/src/Test/L0/Worker/ActionRunnerL0.cs @@ -0,0 +1,359 @@ +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.DistributedTask.Pipelines; +using GitHub.DistributedTask.Pipelines.ContextData; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Worker; +using GitHub.Runner.Worker.Container; +using GitHub.Runner.Worker.Handlers; +using Moq; +using Newtonsoft.Json.Linq; +using System; +using System.Collections.Generic; +using System.IO; +using System.IO.Compression; +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Xunit; +using Pipelines = GitHub.DistributedTask.Pipelines; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class ActionRunnerL0 + { + private CancellationTokenSource _ecTokenSource; + private Mock _handlerFactory; + private Mock _actionManager; + private Mock _defaultStepHost; + private Mock _ec; + private TestHostContext _hc; + private ActionRunner _actionRunner; + private IActionManifestManager _actionManifestManager; + private string _workFolder; + private DictionaryContextData _context = new DictionaryContextData(); + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void MergeDefaultInputs() + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actionInputs = new MappingToken(null, null, null); + actionInputs.Add(new StringToken(null, null, null, "input1"), new StringToken(null, null, null, "test1")); + actionInputs.Add(new StringToken(null, null, null, "input2"), new StringToken(null, null, null, "test2")); + var action = new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.ContainerRegistryReference() + { + Image = "ubuntu:16.04" + }, + Inputs = actionInputs + }; + + _actionRunner.Action = action; + + Dictionary finialInputs = new Dictionary(); + _handlerFactory.Setup(x => x.Create(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>(), It.IsAny>(), It.IsAny(), It.IsAny())) + .Callback((IExecutionContext executionContext, Pipelines.ActionStepDefinitionReference actionReference, IStepHost stepHost, ActionExecutionData data, Dictionary inputs, Dictionary environment, Variables runtimeVariables, string taskDirectory) => + { + finialInputs = inputs; + }) + .Returns(new Mock().Object); + + //Act + await _actionRunner.RunAsync(); + + foreach (var input in finialInputs) + { + _hc.GetTrace().Info($"Input: {input.Key}={input.Value}"); + } + + //Assert + Assert.Equal(finialInputs["input1"], "test1"); + Assert.Equal(finialInputs["input2"], "test2"); + Assert.Equal(finialInputs["input3"], "github"); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void WriteEventPayload() + { + //Arrange + Setup(); + var actionId = Guid.NewGuid(); + var actionInputs = new MappingToken(null, null, null); + actionInputs.Add(new StringToken(null, null, null, "input1"), new StringToken(null, null, null, "test1")); + actionInputs.Add(new StringToken(null, null, null, "input2"), new StringToken(null, null, null, "test2")); + var action = new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.ContainerRegistryReference() + { + Image = "ubuntu:16.04" + }, + 
Inputs = actionInputs + }; + + _actionRunner.Action = action; + + Dictionary finialInputs = new Dictionary(); + _handlerFactory.Setup(x => x.Create(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>(), It.IsAny>(), It.IsAny(), It.IsAny())) + .Callback((IExecutionContext executionContext, Pipelines.ActionStepDefinitionReference actionReference, IStepHost stepHost, ActionExecutionData data, Dictionary inputs, Dictionary environment, Variables runtimeVariables, string taskDirectory) => + { + finialInputs = inputs; + }) + .Returns(new Mock().Object); + + //Act + await _actionRunner.RunAsync(); + + //Assert + _ec.Verify(x => x.SetGitHubContext("event_path", Path.Combine(_hc.GetDirectory(WellKnownDirectory.Temp), "_github_workflow", "event.json")), Times.Once); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void EvaluateLegacyDisplayName() + { + // Arrange + Setup(); + var actionInputs = new MappingToken(null, null, null); + actionInputs.Add(new StringToken(null, null, null, "script"), new StringToken(null, null, null, "echo hello world")); + var actionId = Guid.NewGuid(); + var actionDisplayName = "Run echo hello world"; + var action = new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + DisplayName = actionDisplayName, + Inputs = actionInputs, + }; + + _actionRunner.Action = action; + + var matrixData = new DictionaryContextData + { + ["node"] = new NumberContextData(8) + }; + _context.Add("matrix", matrixData); + + // Act + // Should not do anything if we don't have a displayNameToken to expand + var didUpdateDisplayName = _actionRunner.TryEvaluateDisplayName(_context, _actionRunner.ExecutionContext); + + // Assert + Assert.False(didUpdateDisplayName); + Assert.Equal(actionDisplayName, _actionRunner.DisplayName); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void EvaluateExpansionOfDisplayNameToken() + { + // Arrange + Setup(); + var actionId = Guid.NewGuid(); + var action = new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + DisplayNameToken = new BasicExpressionToken(null, null, null, "matrix.node"), + }; + + _actionRunner.Action = action; + var expectedString = "8"; + + var matrixData = new DictionaryContextData + { + ["node"] = new StringContextData(expectedString) + }; + _context.Add("matrix", matrixData); + + // Act + // Should expand the displaynameToken and set the display name to that + var didUpdateDisplayName = _actionRunner.TryEvaluateDisplayName(_context, _actionRunner.ExecutionContext); + + // Assert + Assert.True(didUpdateDisplayName); + Assert.Equal(expectedString, _actionRunner.DisplayName); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void EvaluateExpansionOfScriptDisplayName() + { + // Arrange + Setup(); + var actionInputs = new MappingToken(null, null, null); + actionInputs.Add(new StringToken(null, null, null, "script"), new BasicExpressionToken(null, null, null, "matrix.node")); + var actionId = Guid.NewGuid(); + var action = new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Inputs = actionInputs, + Reference = new Pipelines.ScriptReference() + }; + + _actionRunner.Action = action; + + var matrixData = new DictionaryContextData + { + ["node"] = new StringContextData("8") + }; + _context.Add("matrix", matrixData); + + // Act + // Should expand the displaynameToken and set the display name to that + var didUpdateDisplayName = _actionRunner.TryEvaluateDisplayName(_context, _actionRunner.ExecutionContext); + + // 
Assert + Assert.True(didUpdateDisplayName); + Assert.Equal("Run 8", _actionRunner.DisplayName); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void EvaluateExpansionOfContainerDisplayName() + { + // Arrange + Setup(); + var actionId = Guid.NewGuid(); + var action = new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + Reference = new Pipelines.ContainerRegistryReference() + { + Image = "TestImageName:latest" + } + }; + _actionRunner.Action = action; + + // Act + // Should expand the displaynameToken and set the display name to that + var didUpdateDisplayName = _actionRunner.TryEvaluateDisplayName(_context, _actionRunner.ExecutionContext); + + // Assert + Assert.True(didUpdateDisplayName); + Assert.Equal("Run TestImageName:latest", _actionRunner.DisplayName); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void EvaluateDisplayNameWithoutContext() + { + // Arrange + Setup(); + var actionId = Guid.NewGuid(); + var action = new Pipelines.ActionStep() + { + Name = "action", + Id = actionId, + DisplayNameToken = new BasicExpressionToken(null, null, null, "matrix.node"), + }; + + _actionRunner.Action = action; + + // Act + // Should not do anything if we don't have context on the display name + var didUpdateDisplayName = _actionRunner.TryEvaluateDisplayName(_context, _actionRunner.ExecutionContext); + + // Assert + Assert.False(didUpdateDisplayName); + // Should use the pretty display name until we can eval + Assert.Equal("${{ matrix.node }}", _actionRunner.DisplayName); + } + + private void CreateAction(string yamlContent, out Pipelines.ActionStep instance, out string directory) + { + directory = Path.Combine(_workFolder, Constants.Path.ActionsDirectory, "GitHub/actions".Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), "master"); + string file = Path.Combine(directory, Constants.Path.ActionManifestFile); + Directory.CreateDirectory(Path.GetDirectoryName(file)); + File.WriteAllText(file, yamlContent); + instance = new Pipelines.ActionStep() + { + Id = Guid.NewGuid(), + Reference = new Pipelines.RepositoryPathReference() + { + Name = "GitHub/actions", + Ref = "master", + RepositoryType = Pipelines.RepositoryTypes.GitHub + } + }; + } + + private void Setup([CallerMemberName] string name = "") + { + _ecTokenSource?.Dispose(); + _ecTokenSource = new CancellationTokenSource(); + + // Test host context. + _hc = new TestHostContext(this, name); + + var actionInputs = new MappingToken(null, null, null); + actionInputs.Add(new StringToken(null, null, null, "input1"), new StringToken(null, null, null, "input1")); + actionInputs.Add(new StringToken(null, null, null, "input2"), new StringToken(null, null, null, "")); + actionInputs.Add(new StringToken(null, null, null, "input3"), new StringToken(null, null, null, "github")); + var actionDefinition = new Definition() + { + Directory = _hc.GetDirectory(WellKnownDirectory.Work), + Data = new ActionDefinitionData() + { + Name = name, + Description = name, + Inputs = actionInputs, + Execution = new ScriptActionExecutionData() + } + }; + + // Mocks. 
+ _actionManager = new Mock(); + _actionManager.Setup(x => x.LoadAction(It.IsAny(), It.IsAny())).Returns(actionDefinition); + + _handlerFactory = new Mock(); + _defaultStepHost = new Mock(); + _actionManifestManager = new ActionManifestManager(); + _actionManifestManager.Initialize(_hc); + + var githubContext = new GitHubContext(); + githubContext.Add("event", JToken.Parse("{\"foo\":\"bar\"}").ToPipelineContextData()); + _context.Add("github", githubContext); + + _ec = new Mock(); + _ec.Setup(x => x.ExpressionValues).Returns(_context); + _ec.Setup(x => x.IntraActionState).Returns(new Dictionary()); + _ec.Setup(x => x.EnvironmentVariables).Returns(new Dictionary()); + _ec.Setup(x => x.SetGitHubContext(It.IsAny(), It.IsAny())); + _ec.Setup(x => x.GetGitHubContext(It.IsAny())).Returns("{\"foo\":\"bar\"}"); + _ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token); + _ec.Setup(x => x.Variables).Returns(new Variables(_hc, new Dictionary())); + _ec.Setup(x => x.Write(It.IsAny(), It.IsAny())).Callback((string tag, string message) => { _hc.GetTrace().Info($"[{tag}]{message}"); }); + _ec.Setup(x => x.AddIssue(It.IsAny(), It.IsAny())).Callback((Issue issue, string message) => { _hc.GetTrace().Info($"[{issue.Type}]{issue.Message ?? message}"); }); + + _hc.SetSingleton(_actionManager.Object); + _hc.SetSingleton(_handlerFactory.Object); + _hc.SetSingleton(_actionManifestManager); + + _hc.EnqueueInstance(_defaultStepHost.Object); + + // Instance to test. + _actionRunner = new ActionRunner(); + _actionRunner.Initialize(_hc); + _actionRunner.ExecutionContext = _ec.Object; + } + } +} diff --git a/src/Test/L0/Worker/ExecutionContextL0.cs b/src/Test/L0/Worker/ExecutionContextL0.cs new file mode 100644 index 00000000000..d39d6f2954a --- /dev/null +++ b/src/Test/L0/Worker/ExecutionContextL0.cs @@ -0,0 +1,259 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Worker; +using Moq; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using Xunit; +using Pipelines = GitHub.DistributedTask.Pipelines; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class ExecutionContextL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void AddIssue_CountWarningsErrors() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange: Create a job request message. + TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference(); + TimelineReference timeline = new TimelineReference(); + JobEnvironment environment = new JobEnvironment(); + environment.SystemConnection = new ServiceEndpoint(); + List tasks = new List(); + Guid JobId = Guid.NewGuid(); + string jobName = "some job name"; + var jobRequest = Pipelines.AgentJobRequestMessageUtil.Convert(new AgentJobRequestMessage(plan, timeline, JobId, jobName, jobName, environment, tasks)); + jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource() + { + Alias = Pipelines.PipelineConstants.SelfAlias, + Id = "github", + Version = "sha1" + }); + jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData(); + + // Arrange: Setup the paging logger. + var pagingLogger = new Mock(); + var jobServerQueue = new Mock(); + jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny(), It.IsAny())); + + hc.EnqueueInstance(pagingLogger.Object); + hc.SetSingleton(jobServerQueue.Object); + + var ec = new Runner.Worker.ExecutionContext(); + ec.Initialize(hc); + + // Act. 
+                ec.InitializeJob(jobRequest, CancellationToken.None);
+
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+                ec.AddIssue(new Issue() { Type = IssueType.Error, Message = "error" });
+
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+                ec.AddIssue(new Issue() { Type = IssueType.Warning, Message = "warning" });
+
+                ec.Complete();
+
+                // Assert.
+                jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny(), It.Is(t => t.ErrorCount == 15)), Times.AtLeastOnce);
+                jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny(), It.Is(t => t.WarningCount == 14)), Times.AtLeastOnce);
+                jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny(), It.Is(t => t.Issues.Where(i => i.Type == IssueType.Error).Count() == 10)), Times.AtLeastOnce);
+                jobServerQueue.Verify(x => x.QueueTimelineRecordUpdate(It.IsAny(), It.Is(t => t.Issues.Where(i => i.Type == IssueType.Warning).Count() == 10)), Times.AtLeastOnce);
+            }
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public void Debug_Multilines()
+        {
+            using (TestHostContext hc = CreateTestContext())
+            {
+                // Arrange: Create a job request message.
+                TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
+                TimelineReference timeline = new TimelineReference();
+                JobEnvironment environment = new JobEnvironment();
+                environment.SystemConnection = new ServiceEndpoint();
+                List tasks = new List();
+                Guid JobId = Guid.NewGuid();
+                string jobName = "some job name";
+                var jobRequest = Pipelines.AgentJobRequestMessageUtil.Convert(new AgentJobRequestMessage(plan, timeline, JobId, jobName, jobName, environment, tasks));
+                jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
+                {
+                    Alias = Pipelines.PipelineConstants.SelfAlias,
+                    Id = "github",
+                    Version = "sha1"
+                });
+                jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
+                jobRequest.Variables["ACTIONS_STEP_DEBUG"] = "true";
+
+                // Arrange: Setup the paging logger.
+                var pagingLogger = new Mock();
+                var jobServerQueue = new Mock();
+                jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny(), It.IsAny()));
+                jobServerQueue.Setup(x => x.QueueWebConsoleLine(It.IsAny(), It.IsAny())).Callback((Guid id, string msg) => { hc.GetTrace().Info(msg); });
+
+                hc.EnqueueInstance(pagingLogger.Object);
+                hc.SetSingleton(jobServerQueue.Object);
+
+                var ec = new Runner.Worker.ExecutionContext();
+                ec.Initialize(hc);
+
+                // Act.
+                ec.InitializeJob(jobRequest, CancellationToken.None);
+
+                ec.Debug(null);
+                ec.Debug("");
+                ec.Debug("\n");
+                ec.Debug("\r\n");
+                ec.Debug("test");
+                ec.Debug("te\nst");
+                ec.Debug("te\r\nst");
+
+                ec.Complete();
+
+                jobServerQueue.Verify(x => x.QueueWebConsoleLine(It.IsAny(), It.IsAny()), Times.Exactly(10));
+            }
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public void RegisterPostJobAction_ShareState()
+        {
+            using (TestHostContext hc = CreateTestContext())
+            {
+                // Arrange: Create a job request message.
+                TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
+                TimelineReference timeline = new TimelineReference();
+                JobEnvironment environment = new JobEnvironment();
+                environment.SystemConnection = new ServiceEndpoint();
+                List tasks = new List();
+                Guid JobId = Guid.NewGuid();
+                string jobName = "some job name";
+                var jobRequest = Pipelines.AgentJobRequestMessageUtil.Convert(new AgentJobRequestMessage(plan, timeline, JobId, jobName, jobName, environment, tasks));
+                jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
+                {
+                    Alias = Pipelines.PipelineConstants.SelfAlias,
+                    Id = "github",
+                    Version = "sha1"
+                });
+                jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
+                jobRequest.Variables["ACTIONS_STEP_DEBUG"] = "true";
+
+                // Arrange: Setup the paging logger.
+                var pagingLogger1 = new Mock();
+                var pagingLogger2 = new Mock();
+                var pagingLogger3 = new Mock();
+                var pagingLogger4 = new Mock();
+                var pagingLogger5 = new Mock();
+                var jobServerQueue = new Mock();
+                jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny(), It.IsAny()));
+                jobServerQueue.Setup(x => x.QueueWebConsoleLine(It.IsAny(), It.IsAny())).Callback((Guid id, string msg) => { hc.GetTrace().Info(msg); });
+
+                var actionRunner1 = new ActionRunner();
+                actionRunner1.Initialize(hc);
+                var actionRunner2 = new ActionRunner();
+                actionRunner2.Initialize(hc);
+
+                hc.EnqueueInstance(pagingLogger1.Object);
+                hc.EnqueueInstance(pagingLogger2.Object);
+                hc.EnqueueInstance(pagingLogger3.Object);
+                hc.EnqueueInstance(pagingLogger4.Object);
+                hc.EnqueueInstance(pagingLogger5.Object);
+                hc.EnqueueInstance(actionRunner1 as IActionRunner);
+                hc.EnqueueInstance(actionRunner2 as IActionRunner);
+                hc.SetSingleton(jobServerQueue.Object);
+
+                var jobContext = new Runner.Worker.ExecutionContext();
+                jobContext.Initialize(hc);
+
+                // Act.
+                jobContext.InitializeJob(jobRequest, CancellationToken.None);
+
+                var action1 = jobContext.CreateChild(Guid.NewGuid(), "action_1", "action_1", null, null);
+                action1.IntraActionState["state"] = "1";
+                var action2 = jobContext.CreateChild(Guid.NewGuid(), "action_2", "action_2", null, null);
+                action2.IntraActionState["state"] = "2";
+
+                action1.RegisterPostJobAction("post1", "always()", new Pipelines.ActionStep() { Name = "post1", DisplayName = "Test 1", Reference = new Pipelines.RepositoryPathReference() { Name = "actions/action" } });
+                action2.RegisterPostJobAction("post2", "always()", new Pipelines.ActionStep() { Name = "post2", DisplayName = "Test 2", Reference = new Pipelines.RepositoryPathReference() { Name = "actions/action" } });
+
+                Assert.NotNull(jobContext.JobSteps);
+                Assert.NotNull(jobContext.PostJobSteps);
+                Assert.Null(action1.JobSteps);
+                Assert.Null(action2.JobSteps);
+                Assert.Null(action1.PostJobSteps);
+                Assert.Null(action2.PostJobSteps);
+
+                var post1 = jobContext.PostJobSteps.Pop();
+                var post2 = jobContext.PostJobSteps.Pop();
+
+                Assert.Equal("post2", (post1 as IActionRunner).Action.Name);
+                Assert.Equal("post1", (post2 as IActionRunner).Action.Name);
+
+                Assert.Equal(ActionRunStage.Post, (post1 as IActionRunner).Stage);
+                Assert.Equal(ActionRunStage.Post, (post2 as IActionRunner).Stage);
+
+                Assert.Equal("always()", (post1 as IActionRunner).Condition);
+                Assert.Equal("always()", (post2 as IActionRunner).Condition);
+
+                Assert.Equal("2", (post1 as IActionRunner).ExecutionContext.IntraActionState["state"]);
+                Assert.Equal("1", (post2 as IActionRunner).ExecutionContext.IntraActionState["state"]);
+            }
+        }
+
+        private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
+        {
+            var hc = new TestHostContext(this, testName);
+
+            // Arrange: Setup the configuration store.
+            var configurationStore = new Mock();
+            configurationStore.Setup(x => x.GetSettings()).Returns(new RunnerSettings());
+            hc.SetSingleton(configurationStore.Object);
+
+            // Arrange: Setup the proxy configuration.
+            var proxy = new Mock();
+            hc.SetSingleton(proxy.Object);
+
+            // Arrange: Setup the cert configuration.
+            var cert = new Mock();
+            hc.SetSingleton(cert.Object);
+
+            // Arrange: Create the execution context.
+ hc.SetSingleton(new Mock().Object); + + return hc; + } + } +} diff --git a/src/Test/L0/Worker/ExpressionManagerL0.cs b/src/Test/L0/Worker/ExpressionManagerL0.cs new file mode 100644 index 00000000000..9bdcdeeeeed --- /dev/null +++ b/src/Test/L0/Worker/ExpressionManagerL0.cs @@ -0,0 +1,186 @@ +using System; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Worker; +using Moq; +using Xunit; +using GitHub.DistributedTask.Expressions2; +using GitHub.DistributedTask.Pipelines.ContextData; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class ExpressionManagerL0 + { + private Mock _ec; + private ExpressionManager _expressionManager; + private DictionaryContextData _expressions; + private JobContext _jobContext; + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void AlwaysFunction() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var variableSets = new[] + { + new { JobStatus = (ActionResult?)null, Expected = true }, + new { JobStatus = (ActionResult?)ActionResult.Cancelled, Expected = true }, + new { JobStatus = (ActionResult?)ActionResult.Failure, Expected = true }, + new { JobStatus = (ActionResult?)ActionResult.Success, Expected = true }, + }; + foreach (var variableSet in variableSets) + { + InitializeExecutionContext(hc); + _jobContext.Status = variableSet.JobStatus; + + // Act. + bool actual = _expressionManager.Evaluate(_ec.Object, "always()").Value; + + // Assert. + Assert.Equal(variableSet.Expected, actual); + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void CancelledFunction() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var variableSets = new[] + { + new { JobStatus = (ActionResult?)ActionResult.Cancelled, Expected = true }, + new { JobStatus = (ActionResult?)null, Expected = false }, + new { JobStatus = (ActionResult?)ActionResult.Failure, Expected = false }, + new { JobStatus = (ActionResult?)ActionResult.Success, Expected = false }, + }; + + foreach (var variableSet in variableSets) + { + InitializeExecutionContext(hc); + _jobContext.Status = variableSet.JobStatus; + + // Act. + bool actual = _expressionManager.Evaluate(_ec.Object, "cancelled()").Value; + + // Assert. + Assert.Equal(variableSet.Expected, actual); + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void FailureFunction() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. + var variableSets = new[] + { + new { JobStatus = (ActionResult?)ActionResult.Failure, Expected = true }, + new { JobStatus = (ActionResult?)null, Expected = false }, + new { JobStatus = (ActionResult?)ActionResult.Cancelled, Expected = false }, + new { JobStatus = (ActionResult?)ActionResult.Success, Expected = false }, + }; + foreach (var variableSet in variableSets) + { + InitializeExecutionContext(hc); + _jobContext.Status = variableSet.JobStatus; + + // Act. + bool actual = _expressionManager.Evaluate(_ec.Object, "failure()").Value; + + // Assert. + Assert.Equal(variableSet.Expected, actual); + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void SuccessFunction() + { + using (TestHostContext hc = CreateTestContext()) + { + // Arrange. 
+                var variableSets = new[]
+                {
+                    new { JobStatus = (ActionResult?)null, Expected = true },
+                    new { JobStatus = (ActionResult?)ActionResult.Success, Expected = true },
+                    new { JobStatus = (ActionResult?)ActionResult.Cancelled, Expected = false },
+                    new { JobStatus = (ActionResult?)ActionResult.Failure, Expected = false },
+                };
+                foreach (var variableSet in variableSets)
+                {
+                    InitializeExecutionContext(hc);
+                    _jobContext.Status = variableSet.JobStatus;
+
+                    // Act.
+                    bool actual = _expressionManager.Evaluate(_ec.Object, "success()").Value;
+
+                    // Assert.
+                    Assert.Equal(variableSet.Expected, actual);
+                }
+            }
+        }
+
+        [Fact]
+        [Trait("Level", "L0")]
+        [Trait("Category", "Worker")]
+        public void ContextNamedValue()
+        {
+            using (TestHostContext hc = CreateTestContext())
+            {
+                // Arrange.
+                var variableSets = new[]
+                {
+                    new { Condition = "github.ref == 'refs/heads/master'", VariableName = "ref", VariableValue = "refs/heads/master", Expected = true },
+                    new { Condition = "github['ref'] == 'refs/heads/master'", VariableName = "ref", VariableValue = "refs/heads/master", Expected = true },
+                    new { Condition = "github.nosuch || '' == ''", VariableName = "ref", VariableValue = "refs/heads/master", Expected = true },
+                    new { Condition = "github['ref'] == 'refs/heads/release'", VariableName = "ref", VariableValue = "refs/heads/master", Expected = false },
+                    new { Condition = "github.ref == 'refs/heads/release'", VariableName = "ref", VariableValue = "refs/heads/master", Expected = false },
+                };
+                foreach (var variableSet in variableSets)
+                {
+                    InitializeExecutionContext(hc);
+                    _ec.Object.ExpressionValues["github"] = new GitHubContext() { { variableSet.VariableName, new StringContextData(variableSet.VariableValue) } };
+
+                    // Act.
+                    bool actual = _expressionManager.Evaluate(_ec.Object, variableSet.Condition).Value;
+
+                    // Assert.
+ Assert.Equal(variableSet.Expected, actual); + } + } + } + + private TestHostContext CreateTestContext([CallerMemberName] String testName = "") + { + var hc = new TestHostContext(this, testName); + _expressionManager = new ExpressionManager(); + _expressionManager.Initialize(hc); + return hc; + } + + private void InitializeExecutionContext(TestHostContext hc) + { + _expressions = new DictionaryContextData(); + _jobContext = new JobContext(); + + _ec = new Mock(); + _ec.SetupAllProperties(); + _ec.Setup(x => x.ExpressionValues).Returns(_expressions); + _ec.Setup(x => x.JobContext).Returns(_jobContext); + } + } +} diff --git a/src/Test/L0/Worker/IssueMatcherL0.cs b/src/Test/L0/Worker/IssueMatcherL0.cs new file mode 100644 index 00000000000..c5e679f1563 --- /dev/null +++ b/src/Test/L0/Worker/IssueMatcherL0.cs @@ -0,0 +1,795 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using GitHub.Runner.Worker; +using GitHub.Services.WebApi; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class IssueMatcherL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Config_Validate_Loop_MayNotBeSetOnSinglePattern() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^error: (.+)$"", + ""message"": 1, + ""loop"": true + } + ] + } + ] +} +"); + Assert.Throws(() => config.Validate()); + + // Sanity test + config.Matchers[0].Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "^file: (.+)$", + File = 1, + }, + config.Matchers[0].Patterns[0], + }; + config.Validate(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Config_Validate_Loop_OnlyAllowedOnLastPattern() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^(error)$"", + ""severity"": 1 + }, + { + ""regexp"": ""^file: (.+)$"", + ""file"": 1, + ""loop"": true + }, + { + ""regexp"": ""^error: (.+)$"", + ""message"": 1 + } + ] + } + ] +} +"); + Assert.Throws(() => config.Validate()); + + // Sanity test + config.Matchers[0].Patterns[1].Loop = false; + config.Matchers[0].Patterns[2].Loop = true; + config.Validate(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Config_Validate_Loop_MustSetMessage() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^file: (.+)$"", + ""message"": 1 + }, + { + ""regexp"": ""^file: (.+)$"", + ""file"": 1, + ""loop"": true + } + ] + } + ] +} +"); + + Assert.Throws(() => config.Validate()); + + config.Matchers[0].Patterns[1].Loop = false; + config.Validate(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Config_Validate_Message_AllowedInFirstPattern() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^file: (.+)$"", + ""message"": 1 + }, + { + ""regexp"": ""^error: (.+)$"", + ""file"": 1 + } + ] + } + ] +} +"); + config.Validate(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Config_Validate_Message_Required() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^error: (.+)$"", + ""file"": 1 + } + ] + } + ] +} 
+"); + Assert.Throws(() => config.Validate()); + + // Sanity test + config.Matchers[0].Patterns[0].File = null; + config.Matchers[0].Patterns[0].Message = 1; + config.Validate(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Config_Validate_Owner_Distinct() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^error: (.+)$"", + ""message"": 1 + } + ] + }, + { + ""owner"": ""MYmatcher"", + ""pattern"": [ + { + ""regexp"": ""^ERR: (.+)$"", + ""message"": 1 + } + ] + } + ] +} +"); + Assert.Throws(() => config.Validate()); + + // Sanity test + config.Matchers[0].Owner = "asdf"; + config.Validate(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Config_Validate_Owner_Required() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": """", + ""pattern"": [ + { + ""regexp"": ""^error: (.+)$"", + ""message"": 1 + } + ] + } + ] +} +"); + Assert.Throws(() => config.Validate()); + + // Sanity test + config.Matchers[0].Owner = "asdf"; + config.Validate(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Config_Validate_Pattern_Required() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + ] + } + ] +} +"); + Assert.Throws(() => config.Validate()); + + // Sanity test + config.Matchers[0].Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "^error: (.+)$", + Message = 1, + } + }; + config.Validate(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Config_Validate_PropertyMayNotBeSetTwice() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^severity: (.+)$"", + ""file"": 1 + }, + { + ""regexp"": ""^file: (.+)$"", + ""file"": 1 + }, + { + ""regexp"": ""^(.+)$"", + ""message"": 1 + } + ] + } + ] +} +"); + Assert.Throws(() => config.Validate()); + + // Sanity test + config.Matchers[0].Patterns[0].File = null; + config.Matchers[0].Patterns[0].Severity = 1; + config.Validate(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Config_Validate_PropertyOutOfRange() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^(.+)$"", + ""message"": 2 + } + ] + } + ] +} +"); + Assert.Throws(() => config.Validate()); + + // Sanity test + config.Matchers[0].Patterns[0].Message = 1; + config.Validate(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Config_Validate_PropertyOutOfRange_LessThanZero() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^(.+)$"", + ""message"": -1 + } + ] + } + ] +} +"); + Assert.Throws(() => config.Validate()); + + // Sanity test + config.Matchers[0].Patterns[0].Message = 1; + config.Validate(); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Matcher_MultiplePatterns_Loop_AccumulatesStatePerLine() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^(.+)$"", + ""file"": 1 + }, + { + ""regexp"": ""^(.+)$"", + ""code"": 1 + }, + { + ""regexp"": ""^message:(.+)$"", + ""message"": 1, + 
""loop"": true + } + ] + } + ] +} +"); + config.Validate(); + var matcher = new IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + var match = matcher.Match("file1"); + Assert.Null(match); + match = matcher.Match("code1"); + Assert.Null(match); + match = matcher.Match("message:message1"); + Assert.Equal("file1", match.File); + Assert.Equal("code1", match.Code); + Assert.Equal("message1", match.Message); + match = matcher.Match("message:message1-2"); // sanity check loop + Assert.Equal("file1", match.File); + Assert.Equal("code1", match.Code); + Assert.Equal("message1-2", match.Message); + match = matcher.Match("abc"); // discarded + match = matcher.Match("file2"); + Assert.Null(match); + match = matcher.Match("code2"); + Assert.Null(match); + match = matcher.Match("message:message2"); + Assert.Equal("file2", match.File); + Assert.Equal("code2", match.Code); + Assert.Equal("message2", match.Message); + match = matcher.Match("abc"); // discarded + match = matcher.Match("abc"); // discarded + match = matcher.Match("file3"); + Assert.Null(match); + match = matcher.Match("code3"); + Assert.Null(match); + match = matcher.Match("message:message3"); + Assert.Equal("file3", match.File); + Assert.Equal("code3", match.Code); + Assert.Equal("message3", match.Message); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Matcher_MultiplePatterns_Loop_BrokenMatchClearsState() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^(.+)$"", + ""file"": 1 + }, + { + ""regexp"": ""^(.+)$"", + ""severity"": 1 + }, + { + ""regexp"": ""^message:(.+)$"", + ""message"": 1, + ""loop"": true + } + ] + } + ] +} +"); + config.Validate(); + var matcher = new IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + var match = matcher.Match("my-file.cs"); // file + Assert.Null(match); + match = matcher.Match("real-bad"); // severity + Assert.Null(match); + match = matcher.Match("message:not-working"); // message + Assert.Equal("my-file.cs", match.File); + Assert.Equal("real-bad", match.Severity); + Assert.Equal("not-working", match.Message); + match = matcher.Match("message:problem"); // message + Assert.Equal("my-file.cs", match.File); + Assert.Equal("real-bad", match.Severity); + Assert.Equal("problem", match.Message); + match = matcher.Match("other-file.cs"); // file - breaks the loop + Assert.Null(match); + match = matcher.Match("message:not-good"); // severity - also matches the message pattern, therefore + Assert.Null(match); // guarantees sufficient previous state has been cleared + match = matcher.Match("message:broken"); // message + Assert.Equal("other-file.cs", match.File); + Assert.Equal("message:not-good", match.Severity); + Assert.Equal("broken", match.Message); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Matcher_MultiplePatterns_Loop_ExtractsProperties() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^file:(.+) fromPath:(.+)$"", + ""file"": 1, + ""fromPath"": 2 + }, + { + ""regexp"": ""^severity:(.+)$"", + ""severity"": 1 + }, + { + ""regexp"": ""^line:(.+) column:(.+) code:(.+) message:(.+)$"", + ""line"": 1, + ""column"": 2, + ""code"": 3, + ""message"": 4, + ""loop"": true + } + ] + } + ] +} +"); + config.Validate(); + var matcher = new IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + var match = 
matcher.Match("file:my-file.cs fromPath:my-project.proj"); + Assert.Null(match); + match = matcher.Match("severity:real-bad"); + Assert.Null(match); + match = matcher.Match("line:123 column:45 code:uh-oh message:not-working"); + Assert.Equal("my-file.cs", match.File); + Assert.Equal("my-project.proj", match.FromPath); + Assert.Equal("real-bad", match.Severity); + Assert.Equal("123", match.Line); + Assert.Equal("45", match.Column); + Assert.Equal("uh-oh", match.Code); + Assert.Equal("not-working", match.Message); + match = matcher.Match("line:234 column:56 code:yikes message:broken"); + Assert.Equal("my-file.cs", match.File); + Assert.Equal("my-project.proj", match.FromPath); + Assert.Equal("real-bad", match.Severity); + Assert.Equal("234", match.Line); + Assert.Equal("56", match.Column); + Assert.Equal("yikes", match.Code); + Assert.Equal("broken", match.Message); + match = matcher.Match("line:345 column:67 code:failed message:cant-do-that"); + Assert.Equal("my-file.cs", match.File); + Assert.Equal("my-project.proj", match.FromPath); + Assert.Equal("real-bad", match.Severity); + Assert.Equal("345", match.Line); + Assert.Equal("67", match.Column); + Assert.Equal("failed", match.Code); + Assert.Equal("cant-do-that", match.Message); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Matcher_MultiplePatterns_NonLoop_AccumulatesStatePerLine() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^(.+)$"", + ""file"": 1 + }, + { + ""regexp"": ""^(.+)$"", + ""code"": 1 + }, + { + ""regexp"": ""^message:(.+)$"", + ""message"": 1 + } + ] + } + ] +} +"); + config.Validate(); + var matcher = new IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + var match = matcher.Match("file1"); + Assert.Null(match); + match = matcher.Match("code1"); + Assert.Null(match); + match = matcher.Match("message:message1"); + Assert.Equal("file1", match.File); + Assert.Equal("code1", match.Code); + Assert.Equal("message1", match.Message); + match = matcher.Match("abc"); // discarded + match = matcher.Match("file2"); + Assert.Null(match); + match = matcher.Match("code2"); + Assert.Null(match); + match = matcher.Match("message:message2"); + Assert.Equal("file2", match.File); + Assert.Equal("code2", match.Code); + Assert.Equal("message2", match.Message); + match = matcher.Match("abc"); // discarded + match = matcher.Match("abc"); // discarded + match = matcher.Match("file3"); + Assert.Null(match); + match = matcher.Match("code3"); + Assert.Null(match); + match = matcher.Match("message:message3"); + Assert.Equal("file3", match.File); + Assert.Equal("code3", match.Code); + Assert.Equal("message3", match.Message); + match = matcher.Match("message:message3"); // sanity check not loop + Assert.Null(match); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Matcher_MultiplePatterns_NonLoop_DoesNotLoop() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^file:(.+)$"", + ""file"": 1 + }, + { + ""regexp"": ""^message:(.+)$"", + ""message"": 1 + } + ] + } + ] +} +"); + config.Validate(); + var matcher = new IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + var match = matcher.Match("file:my-file.cs"); + Assert.Null(match); + match = matcher.Match("message:not-working"); + Assert.Equal("my-file.cs", match.File); + Assert.Equal("not-working", match.Message); + match = 
matcher.Match("message:not-working"); + Assert.Null(match); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Matcher_MultiplePatterns_NonLoop_ExtractsProperties() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^file:(.+) fromPath:(.+)$"", + ""file"": 1, + ""fromPath"": 2 + }, + { + ""regexp"": ""^severity:(.+)$"", + ""severity"": 1 + }, + { + ""regexp"": ""^line:(.+) column:(.+) code:(.+) message:(.+)$"", + ""line"": 1, + ""column"": 2, + ""code"": 3, + ""message"": 4 + } + ] + } + ] +} +"); + config.Validate(); + var matcher = new IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + var match = matcher.Match("file:my-file.cs fromPath:my-project.proj"); + Assert.Null(match); + match = matcher.Match("severity:real-bad"); + Assert.Null(match); + match = matcher.Match("line:123 column:45 code:uh-oh message:not-working"); + Assert.Equal("my-file.cs", match.File); + Assert.Equal("my-project.proj", match.FromPath); + Assert.Equal("real-bad", match.Severity); + Assert.Equal("123", match.Line); + Assert.Equal("45", match.Column); + Assert.Equal("uh-oh", match.Code); + Assert.Equal("not-working", match.Message); + match = matcher.Match("line:123 column:45 code:uh-oh message:not-working"); // sanity check not loop + Assert.Null(match); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Matcher_MultiplePatterns_NonLoop_MatchClearsState() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^(.+)$"", + ""file"": 1 + }, + { + ""regexp"": ""^(.+)$"", + ""severity"": 1 + }, + { + ""regexp"": ""^(.+)$"", + ""message"": 1 + } + ] + } + ] +} +"); + config.Validate(); + var matcher = new IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + var match = matcher.Match("my-file.cs"); // file + Assert.Null(match); + match = matcher.Match("real-bad"); // severity + Assert.Null(match); + match = matcher.Match("not-working"); // message + Assert.Equal("my-file.cs", match.File); + Assert.Equal("real-bad", match.Severity); + Assert.Equal("not-working", match.Message); + match = matcher.Match("other-file.cs"); // file + Assert.Null(match); + match = matcher.Match("not-good"); // severity + Assert.Null(match); + match = matcher.Match("broken"); // message + Assert.Equal("other-file.cs", match.File); + Assert.Equal("not-good", match.Severity); + Assert.Equal("broken", match.Message); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Matcher_SetsOwner() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^(.+)$"", + ""message"": 1 + } + ] + } + ] +} +"); + config.Validate(); + var matcher = new IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + Assert.Equal("myMatcher", matcher.Owner); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Matcher_SinglePattern_ExtractsProperties() + { + var config = JsonUtility.FromString(@" +{ + ""problemMatcher"": [ + { + ""owner"": ""myMatcher"", + ""pattern"": [ + { + ""regexp"": ""^file:(.+) line:(.+) column:(.+) severity:(.+) code:(.+) message:(.+) fromPath:(.+)$"", + ""file"": 1, + ""line"": 2, + ""column"": 3, + ""severity"": 4, + ""code"": 5, + ""message"": 6, + ""fromPath"": 7 + } + ] + } + ] +} +"); + config.Validate(); + var matcher = new 
IssueMatcher(config.Matchers[0], TimeSpan.FromSeconds(1)); + var match = matcher.Match("file:my-file.cs line:123 column:45 severity:real-bad code:uh-oh message:not-working fromPath:my-project.proj"); + Assert.Equal("my-file.cs", match.File); + Assert.Equal("123", match.Line); + Assert.Equal("45", match.Column); + Assert.Equal("real-bad", match.Severity); + Assert.Equal("uh-oh", match.Code); + Assert.Equal("not-working", match.Message); + Assert.Equal("my-project.proj", match.FromPath); + } + } +} diff --git a/src/Test/L0/Worker/JobExtensionL0.cs b/src/Test/L0/Worker/JobExtensionL0.cs new file mode 100644 index 00000000000..29c9729e65c --- /dev/null +++ b/src/Test/L0/Worker/JobExtensionL0.cs @@ -0,0 +1,386 @@ +// using GitHub.DistributedTask.WebApi; +// using GitHub.Runner.Worker; +// using Moq; +// using System; +// using System.Collections.Generic; +// using System.Linq; +// using System.Runtime.CompilerServices; +// using System.Threading.Tasks; +// using Xunit; +// using System.Threading; +// using Pipelines = GitHub.DistributedTask.Pipelines; + +// namespace GitHub.Runner.Common.Tests.Worker +// { +// public sealed class JobExtensionL0 +// { +// private class TestJobExtension : JobExtension +// { +// public override HostTypes HostType => HostTypes.None; + +// public override Type ExtensionType => typeof(IJobExtension); + +// public override void ConvertLocalPath(IExecutionContext context, string localPath, out string repoName, out string sourcePath) +// { +// repoName = ""; +// sourcePath = ""; +// } + +// public override IStep GetExtensionPostJobStep(IExecutionContext jobContext) +// { +// return null; +// } + +// public override IStep GetExtensionPreJobStep(IExecutionContext jobContext) +// { +// return null; +// } + +// public override string GetRootedPath(IExecutionContext context, string path) +// { +// return path; +// } + +// public override void InitializeJobExtension(IExecutionContext context, IList steps, Pipelines.WorkspaceOptions workspace) +// { +// return; +// } +// } + +// private IExecutionContext _jobEc; +// private Pipelines.AgentJobRequestMessage _message; +// private Mock _taskManager; +// private Mock _logPlugin; +// private Mock _jobServerQueue; +// private Mock _proxy; +// private Mock _cert; +// private Mock _config; +// private Mock _logger; +// private Mock _express; +// private Mock _containerProvider; +// private CancellationTokenSource _tokenSource; +// private TestHostContext CreateTestContext([CallerMemberName] String testName = "") +// { +// var hc = new TestHostContext(this, testName); +// _jobEc = new Runner.Worker.ExecutionContext(); +// _taskManager = new Mock(); +// _jobServerQueue = new Mock(); +// _config = new Mock(); +// _logger = new Mock(); +// _proxy = new Mock(); +// _cert = new Mock(); +// _express = new Mock(); +// _containerProvider = new Mock(); +// _logPlugin = new Mock(); + +// TaskRunner step1 = new TaskRunner(); +// TaskRunner step2 = new TaskRunner(); +// TaskRunner step3 = new TaskRunner(); +// TaskRunner step4 = new TaskRunner(); +// TaskRunner step5 = new TaskRunner(); +// TaskRunner step6 = new TaskRunner(); +// TaskRunner step7 = new TaskRunner(); +// TaskRunner step8 = new TaskRunner(); +// TaskRunner step9 = new TaskRunner(); +// TaskRunner step10 = new TaskRunner(); +// TaskRunner step11 = new TaskRunner(); +// TaskRunner step12 = new TaskRunner(); + +// _logger.Setup(x => x.Setup(It.IsAny(), It.IsAny())); +// var settings = new AgentSettings +// { +// AgentId = 1, +// AgentName = "agent1", +// ServerUrl = 
"https://test.visualstudio.com", +// WorkFolder = "_work", +// }; + +// _config.Setup(x => x.GetSettings()) +// .Returns(settings); + +// _proxy.Setup(x => x.ProxyAddress) +// .Returns(string.Empty); + +// if (_tokenSource != null) +// { +// _tokenSource.Dispose(); +// _tokenSource = null; +// } + +// _tokenSource = new CancellationTokenSource(); +// TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference(); +// TimelineReference timeline = new Timeline(Guid.NewGuid()); +// JobEnvironment environment = new JobEnvironment(); +// environment.Variables[Constants.Variables.System.Culture] = "en-US"; +// environment.SystemConnection = new ServiceEndpoint() +// { +// Name = WellKnownServiceEndpointNames.SystemVssConnection, +// Url = new Uri("https://test.visualstudio.com"), +// Authorization = new EndpointAuthorization() +// { +// Scheme = "Test", +// } +// }; +// environment.SystemConnection.Authorization.Parameters["AccessToken"] = "token"; + +// List tasks = new List() +// { +// new TaskInstance() +// { +// InstanceId = Guid.NewGuid(), +// DisplayName = "task1", +// }, +// new TaskInstance() +// { +// InstanceId = Guid.NewGuid(), +// DisplayName = "task2", +// }, +// new TaskInstance() +// { +// InstanceId = Guid.NewGuid(), +// DisplayName = "task3", +// }, +// new TaskInstance() +// { +// InstanceId = Guid.NewGuid(), +// DisplayName = "task4", +// }, +// new TaskInstance() +// { +// InstanceId = Guid.NewGuid(), +// DisplayName = "task5", +// }, +// new TaskInstance() +// { +// InstanceId = Guid.NewGuid(), +// DisplayName = "task6", +// }, +// new TaskInstance() +// { +// InstanceId = Guid.NewGuid(), +// DisplayName = "task7", +// }, +// }; + +// Guid JobId = Guid.NewGuid(); +// _message = Pipelines.AgentJobRequestMessageUtil.Convert(new AgentJobRequestMessage(plan, timeline, JobId, testName, testName, environment, tasks)); + +// _taskManager.Setup(x => x.DownloadAsync(It.IsAny(), It.IsAny>())) +// .Returns(Task.CompletedTask); + +// _taskManager.Setup(x => x.Load(It.Is(t => t.DisplayName == "task1"))) +// .Returns(new Definition() +// { +// Data = new DefinitionData() +// { +// PreJobExecution = null, +// Execution = new ExecutionData(), +// PostJobExecution = null, +// }, +// }); +// _taskManager.Setup(x => x.Load(It.Is(t => t.DisplayName == "task2"))) +// .Returns(new Definition() +// { +// Data = new DefinitionData() +// { +// PreJobExecution = new ExecutionData(), +// Execution = new ExecutionData(), +// PostJobExecution = new ExecutionData(), +// }, +// }); +// _taskManager.Setup(x => x.Load(It.Is(t => t.DisplayName == "task3"))) +// .Returns(new Definition() +// { +// Data = new DefinitionData() +// { +// PreJobExecution = new ExecutionData(), +// Execution = null, +// PostJobExecution = new ExecutionData(), +// }, +// }); +// _taskManager.Setup(x => x.Load(It.Is(t => t.DisplayName == "task4"))) +// .Returns(new Definition() +// { +// Data = new DefinitionData() +// { +// PreJobExecution = new ExecutionData(), +// Execution = null, +// PostJobExecution = null, +// }, +// }); +// _taskManager.Setup(x => x.Load(It.Is(t => t.DisplayName == "task5"))) +// .Returns(new Definition() +// { +// Data = new DefinitionData() +// { +// PreJobExecution = null, +// Execution = null, +// PostJobExecution = new ExecutionData(), +// }, +// }); +// _taskManager.Setup(x => x.Load(It.Is(t => t.DisplayName == "task6"))) +// .Returns(new Definition() +// { +// Data = new DefinitionData() +// { +// PreJobExecution = new ExecutionData(), +// Execution = new ExecutionData(), +// 
PostJobExecution = null, +// }, +// }); +// _taskManager.Setup(x => x.Load(It.Is(t => t.DisplayName == "task7"))) +// .Returns(new Definition() +// { +// Data = new DefinitionData() +// { +// PreJobExecution = null, +// Execution = new ExecutionData(), +// PostJobExecution = new ExecutionData(), +// }, +// }); + +// hc.SetSingleton(_taskManager.Object); +// hc.SetSingleton(_config.Object); +// hc.SetSingleton(_jobServerQueue.Object); +// hc.SetSingleton(_proxy.Object); +// hc.SetSingleton(_cert.Object); +// hc.SetSingleton(_express.Object); +// hc.SetSingleton(_containerProvider.Object); +// hc.SetSingleton(_logPlugin.Object); +// hc.EnqueueInstance(_logger.Object); // jobcontext logger +// hc.EnqueueInstance(_logger.Object); // init step logger +// hc.EnqueueInstance(_logger.Object); // step 1 +// hc.EnqueueInstance(_logger.Object); +// hc.EnqueueInstance(_logger.Object); +// hc.EnqueueInstance(_logger.Object); +// hc.EnqueueInstance(_logger.Object); +// hc.EnqueueInstance(_logger.Object); +// hc.EnqueueInstance(_logger.Object); +// hc.EnqueueInstance(_logger.Object); +// hc.EnqueueInstance(_logger.Object); +// hc.EnqueueInstance(_logger.Object); +// hc.EnqueueInstance(_logger.Object); +// hc.EnqueueInstance(_logger.Object); // step 12 + +// hc.EnqueueInstance(step1); +// hc.EnqueueInstance(step2); +// hc.EnqueueInstance(step3); +// hc.EnqueueInstance(step4); +// hc.EnqueueInstance(step5); +// hc.EnqueueInstance(step6); +// hc.EnqueueInstance(step7); +// hc.EnqueueInstance(step8); +// hc.EnqueueInstance(step9); +// hc.EnqueueInstance(step10); +// hc.EnqueueInstance(step11); +// hc.EnqueueInstance(step12); + +// _jobEc.Initialize(hc); +// _jobEc.InitializeJob(_message, _tokenSource.Token); +// return hc; +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public async Task JobExtensioBuildStepsList() +// { +// using (TestHostContext hc = CreateTestContext()) +// { +// TestJobExtension testExtension = new TestJobExtension(); +// testExtension.Initialize(hc); +// List result = await testExtension.InitializeJob(_jobEc, _message); + +// var trace = hc.GetTrace(); + +// trace.Info(string.Join(", ", result.Select(x => x.DisplayName))); + +// Assert.Equal(12, result.Count); + +// Assert.Equal("task2", result[0].DisplayName); +// Assert.Equal("task3", result[1].DisplayName); +// Assert.Equal("task4", result[2].DisplayName); +// Assert.Equal("task6", result[3].DisplayName); +// Assert.Equal("task1", result[4].DisplayName); +// Assert.Equal("task2", result[5].DisplayName); +// Assert.Equal("task6", result[6].DisplayName); +// Assert.Equal("task7", result[7].DisplayName); +// Assert.Equal("task7", result[8].DisplayName); +// Assert.Equal("task5", result[9].DisplayName); +// Assert.Equal("task3", result[10].DisplayName); +// Assert.Equal("task2", result[11].DisplayName); +// } +// } + +// // [Fact] +// // [Trait("Level", "L0")] +// // [Trait("Category", "Worker")] +// // public async Task JobExtensionIntraTaskState() +// // { +// // using (TestHostContext hc = CreateTestContext()) +// // { +// // TestJobExtension testExtension = new TestJobExtension(); +// // testExtension.Initialize(hc); +// // List result = await testExtension.InitializeJob(_jobEc, _message); + +// // var trace = hc.GetTrace(); + +// // trace.Info(string.Join(", ", result.Select(x => x.DisplayName))); + +// // Assert.Equal(12, result.Count); + +// // result[0].ExecutionContext.TaskVariables.Set("state1", "value1", false); +// // Assert.Equal("value1", 
result[5].ExecutionContext.TaskVariables.Get("state1")); +// // Assert.Equal("value1", result[11].ExecutionContext.TaskVariables.Get("state1")); + +// // Assert.Null(result[4].ExecutionContext.TaskVariables.Get("state1")); +// // Assert.Null(result[1].ExecutionContext.TaskVariables.Get("state1")); +// // Assert.Null(result[2].ExecutionContext.TaskVariables.Get("state1")); +// // Assert.Null(result[10].ExecutionContext.TaskVariables.Get("state1")); +// // Assert.Null(result[6].ExecutionContext.TaskVariables.Get("state1")); +// // Assert.Null(result[7].ExecutionContext.TaskVariables.Get("state1")); +// // } +// // } + +// #if OS_WINDOWS +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public async Task JobExtensionManagementScriptStep() +// { +// using (TestHostContext hc = CreateTestContext()) +// { +// hc.EnqueueInstance(_logger.Object); +// hc.EnqueueInstance(_logger.Object); + +// Environment.SetEnvironmentVariable("VSTS_AGENT_INIT_INTERNAL_TEMP_HACK", "C:\\init.ps1"); +// Environment.SetEnvironmentVariable("VSTS_AGENT_CLEANUP_INTERNAL_TEMP_HACK", "C:\\clenup.ps1"); + +// try +// { +// TestJobExtension testExtension = new TestJobExtension(); +// testExtension.Initialize(hc); +// List result = await testExtension.InitializeJob(_jobEc, _message); + +// var trace = hc.GetTrace(); + +// trace.Info(string.Join(", ", result.Select(x => x.DisplayName))); + +// Assert.Equal(14, result.Count); + +// Assert.True(result[0] is ManagementScriptStep); +// Assert.True(result[13] is ManagementScriptStep); + +// Assert.Equal(result[0].DisplayName, "Agent Initialization"); +// Assert.Equal(result[13].DisplayName, "Agent Cleanup"); +// } +// finally +// { +// Environment.SetEnvironmentVariable("VSTS_AGENT_INIT_INTERNAL_TEMP_HACK", ""); +// Environment.SetEnvironmentVariable("VSTS_AGENT_CLEANUP_INTERNAL_TEMP_HACK", ""); +// } +// } +// } +// #endif +// } +// } diff --git a/src/Test/L0/Worker/JobRunnerL0.cs b/src/Test/L0/Worker/JobRunnerL0.cs new file mode 100644 index 00000000000..c64a6eac0e7 --- /dev/null +++ b/src/Test/L0/Worker/JobRunnerL0.cs @@ -0,0 +1,223 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Worker; +using Moq; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading.Tasks; +using Xunit; +using System.Threading; +using System.Collections.ObjectModel; +using Pipelines = GitHub.DistributedTask.Pipelines; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class JobRunnerL0 + { + private IExecutionContext _jobEc; + private JobRunner _jobRunner; + private List _initResult = new List(); + private Pipelines.AgentJobRequestMessage _message; + private CancellationTokenSource _tokenSource; + private Mock _jobServer; + private Mock _jobServerQueue; + private Mock _proxyConfig; + private Mock _cert; + private Mock _config; + private Mock _extensions; + private Mock _stepRunner; + + private Mock _jobExtension; + private Mock _logger; + private Mock _temp; + private Mock _diagnosticLogManager; + + private TestHostContext CreateTestContext([CallerMemberName] String testName = "") + { + var hc = new TestHostContext(this, testName); + + _jobEc = new Runner.Worker.ExecutionContext(); + _config = new Mock(); + _extensions = new Mock(); + _jobExtension = new Mock(); + _jobServer = new Mock(); + _jobServerQueue = new Mock(); + _proxyConfig = new Mock(); + _cert = new Mock(); + _stepRunner = new Mock(); + _logger = new Mock(); + _temp = new Mock(); + _diagnosticLogManager = new 
Mock(); + + if (_tokenSource != null) + { + _tokenSource.Dispose(); + _tokenSource = null; + } + + _tokenSource = new CancellationTokenSource(); + var expressionManager = new ExpressionManager(); + expressionManager.Initialize(hc); + hc.SetSingleton(expressionManager); + + _jobRunner = new JobRunner(); + _jobRunner.Initialize(hc); + + TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference(); + TimelineReference timeline = new Timeline(Guid.NewGuid()); + JobEnvironment environment = new JobEnvironment(); + environment.Variables[Constants.Variables.System.Culture] = "en-US"; + environment.SystemConnection = new ServiceEndpoint() + { + Name = WellKnownServiceEndpointNames.SystemVssConnection, + Url = new Uri("https://test.visualstudio.com"), + Authorization = new EndpointAuthorization() + { + Scheme = "Test", + } + }; + environment.SystemConnection.Authorization.Parameters["AccessToken"] = "token"; + + List tasks = new List(); + Guid JobId = Guid.NewGuid(); + _message = Pipelines.AgentJobRequestMessageUtil.Convert(new AgentJobRequestMessage(plan, timeline, JobId, testName, testName, environment, tasks)); + _message.Resources.Repositories.Add(new Pipelines.RepositoryResource() + { + Alias = Pipelines.PipelineConstants.SelfAlias, + Id = "github", + Version = "sha1" + }); + _message.ContextData.Add("github", new Pipelines.ContextData.DictionaryContextData()); + + _initResult.Clear(); + + _jobExtension.Setup(x => x.InitializeJob(It.IsAny(), It.IsAny())). + Returns(Task.FromResult(_initResult)); + + _proxyConfig.Setup(x => x.ProxyAddress) + .Returns(string.Empty); + + var settings = new RunnerSettings + { + AgentId = 1, + AgentName = "agent1", + ServerUrl = "https://test.visualstudio.com", + WorkFolder = "_work", + }; + + _config.Setup(x => x.GetSettings()) + .Returns(settings); + + _logger.Setup(x => x.Setup(It.IsAny(), It.IsAny())); + + hc.SetSingleton(_config.Object); + hc.SetSingleton(_jobServer.Object); + hc.SetSingleton(_jobServerQueue.Object); + hc.SetSingleton(_proxyConfig.Object); + hc.SetSingleton(_cert.Object); + hc.SetSingleton(_stepRunner.Object); + hc.SetSingleton(_extensions.Object); + hc.SetSingleton(_temp.Object); + hc.SetSingleton(_diagnosticLogManager.Object); + hc.EnqueueInstance(_jobEc); + hc.EnqueueInstance(_logger.Object); + hc.EnqueueInstance(_jobExtension.Object); + return hc; + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async Task JobExtensionInitializeFailure() + { + using (TestHostContext hc = CreateTestContext()) + { + _jobExtension.Setup(x => x.InitializeJob(It.IsAny(), It.IsAny())) + .Throws(new Exception()); + + await _jobRunner.RunAsync(_message, _tokenSource.Token); + + Assert.Equal(TaskResult.Failed, _jobEc.Result); + _stepRunner.Verify(x => x.RunAsync(It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async Task JobExtensionInitializeCancelled() + { + using (TestHostContext hc = CreateTestContext()) + { + _jobExtension.Setup(x => x.InitializeJob(It.IsAny(), It.IsAny())) + .Throws(new OperationCanceledException()); + _tokenSource.Cancel(); + + await _jobRunner.RunAsync(_message, _tokenSource.Token); + + Assert.Equal(TaskResult.Canceled, _jobEc.Result); + _stepRunner.Verify(x => x.RunAsync(It.IsAny()), Times.Never); + } + } + + // TODO: Move these tests over to JobExtensionL0.cs + // [Fact] + // [Trait("Level", "L0")] + // [Trait("Category", "Worker")] + // public async Task UploadDiganosticLogIfEnvironmentVariableSet() + // { + // using 
(TestHostContext hc = CreateTestContext()) + // { + // _message.Variables[Constants.Variables.Actions.RunnerDebug] = "true"; + + // await _jobRunner.RunAsync(_message, _tokenSource.Token); + + // _diagnosticLogManager.Verify(x => + // x.UploadDiagnosticLogsAsync( + // It.IsAny(), + // It.IsAny(), + // It.IsAny()), + // Times.Once); + // } + // } + + // [Fact] + // [Trait("Level", "L0")] + // [Trait("Category", "Worker")] + // public async Task DontUploadDiagnosticLogIfEnvironmentVariableFalse() + // { + // using (TestHostContext hc = CreateTestContext()) + // { + // _message.Variables[Constants.Variables.Actions.RunnerDebug] = "false"; + + // await _jobRunner.RunAsync(_message, _tokenSource.Token); + + // _diagnosticLogManager.Verify(x => + // x.UploadDiagnosticLogsAsync( + // It.IsAny(), + // It.IsAny(), + // It.IsAny()), + // Times.Never); + // } + // } + + // [Fact] + // [Trait("Level", "L0")] + // [Trait("Category", "Worker")] + // public async Task DontUploadDiagnosticLogIfEnvironmentVariableMissing() + // { + // using (TestHostContext hc = CreateTestContext()) + // { + // await _jobRunner.RunAsync(_message, _tokenSource.Token); + + // _diagnosticLogManager.Verify(x => + // x.UploadDiagnosticLogsAsync( + // It.IsAny(), + // It.IsAny(), + // It.IsAny()), + // Times.Never); + // } + // } + } +} diff --git a/src/Test/L0/Worker/OutputManagerL0.cs b/src/Test/L0/Worker/OutputManagerL0.cs new file mode 100644 index 00000000000..8e0680ddaf5 --- /dev/null +++ b/src/Test/L0/Worker/OutputManagerL0.cs @@ -0,0 +1,728 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Runtime.CompilerServices; +using GitHub.Runner.Sdk; +using GitHub.Runner.Worker; +using GitHub.Runner.Worker.Handlers; +using Moq; +using Xunit; +using DTWebApi = GitHub.DistributedTask.WebApi; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class OutputManagerL0 + { + private Mock _executionContext; + private Mock _commandManager; + private Variables _variables; + private OnMatcherChanged _onMatcherChanged; + private List> _issues; + private List _messages; + private List _commands; + private OutputManager _outputManager; + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void AddMatcher_Clobber() + { + var matchers = new IssueMatchersConfig + { + Matchers = + { + new IssueMatcherConfig + { + Owner = "my-matcher-1", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "ERROR: (.+)", + Message = 1, + }, + }, + }, + new IssueMatcherConfig + { + Owner = "my-matcher-2", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "NOT GOOD: (.+)", + Message = 1, + }, + }, + }, + }, + }; + using (Setup(matchers: matchers)) + using (_outputManager) + { + Process("ERROR: message 1"); + Process("NOT GOOD: message 2"); + Add(new IssueMatcherConfig + { + Owner = "my-matcher-1", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "ERROR: (.+) END MESSAGE", + Message = 1, + }, + }, + }); + Process("ERROR: message 3 END MESSAGE"); + Process("ERROR: message 4"); + Process("NOT GOOD: message 5"); + Assert.Equal(4, _issues.Count); + Assert.Equal("message 1", _issues[0].Item1.Message); + Assert.Equal("message 2", _issues[1].Item1.Message); + Assert.Equal("message 3", _issues[2].Item1.Message); + Assert.Equal("message 5", _issues[3].Item1.Message); + Assert.Equal(0, _commands.Count); + Assert.Equal(1, _messages.Count); + Assert.Equal("ERROR: message 4", _messages[0]); + } + } + + [Fact] + [Trait("Level", "L0")] + 
[Trait("Category", "Worker")] + public void AddMatcher_Prepend() + { + var matchers = new IssueMatchersConfig + { + Matchers = + { + new IssueMatcherConfig + { + Owner = "my-matcher-1", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "ERROR: (.+)", + Message = 1, + }, + }, + }, + new IssueMatcherConfig + { + Owner = "my-matcher-2", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "NOT GOOD: (.+)", + Message = 1, + }, + }, + }, + }, + }; + using (Setup(matchers: matchers)) + using (_outputManager) + { + Process("ERROR: message 1"); + Process("NOT GOOD: message 2"); + Add(new IssueMatcherConfig + { + Owner = "new-matcher", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "ERROR: (.+) END MESSAGE", + Message = 1, + }, + }, + }); + Process("ERROR: message 3 END MESSAGE"); + Process("ERROR: message 4"); + Process("NOT GOOD: message 5"); + Assert.Equal(5, _issues.Count); + Assert.Equal("message 1", _issues[0].Item1.Message); + Assert.Equal("message 2", _issues[1].Item1.Message); + Assert.Equal("message 3", _issues[2].Item1.Message); + Assert.Equal("message 4", _issues[3].Item1.Message); + Assert.Equal("message 5", _issues[4].Item1.Message); + Assert.Equal(0, _commands.Count); + Assert.Equal(0, _messages.Count); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Code() + { + var matchers = new IssueMatchersConfig + { + Matchers = + { + new IssueMatcherConfig + { + Owner = "my-matcher-1", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = @"(.*): (.+)", + Code = 1, + Message = 2, + }, + }, + }, + }, + }; + using (Setup(matchers: matchers)) + using (_outputManager) + { + Process("BAD: real bad"); + Process(": not working"); + Assert.Equal(2, _issues.Count); + Assert.Equal("real bad", _issues[0].Item1.Message); + Assert.Equal("BAD", _issues[0].Item1.Data["code"]); + Assert.Equal("not working", _issues[1].Item1.Message); + Assert.False(_issues[1].Item1.Data.ContainsKey("code")); + Assert.Equal(0, _commands.Count); + Assert.Equal(0, _messages.Count); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void DoesNotResetMatchingMatcher() + { + var matchers = new IssueMatchersConfig + { + Matchers = + { + new IssueMatcherConfig + { + Owner = "my-matcher-1", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "Start: .+", + }, + new IssuePatternConfig + { + Pattern = "Error: (.+)", + Message = 1, + Loop = true, + }, + }, + }, + }, + }; + using (Setup(matchers: matchers)) + using (_outputManager) + { + Process("Start: hello"); + Process("Error: it broke"); + Process("Error: oh no"); + Process("Error: not good"); + Process("regular message 1"); + Process("Start: hello again"); + Process("Error: it broke again"); + Process("Error: real bad"); + Process("regular message 2"); + Assert.Equal(5, _issues.Count); + Assert.Equal("it broke", _issues[0].Item1.Message); + Assert.Equal("oh no", _issues[1].Item1.Message); + Assert.Equal("not good", _issues[2].Item1.Message); + Assert.Equal("it broke again", _issues[3].Item1.Message); + Assert.Equal("real bad", _issues[4].Item1.Message); + Assert.Equal(0, _commands.Count); + Assert.Equal(4, _messages.Count); + Assert.Equal("Start: hello", _messages[0]); + Assert.Equal("regular message 1", _messages[1]); + Assert.Equal("Start: hello again", _messages[2]); + Assert.Equal("regular message 2", _messages[3]); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void InitialMatchers() + { + var matchers = 
new IssueMatchersConfig + { + Matchers = + { + new IssueMatcherConfig + { + Owner = "my-matcher-1", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "ERROR: (.+)", + Message = 1, + }, + }, + }, + new IssueMatcherConfig + { + Owner = "my-matcher-2", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "NOT GOOD: (.+)", + Message = 1, + }, + }, + }, + }, + }; + using (Setup(matchers: matchers)) + using (_outputManager) + { + Process("ERROR: it is broken"); + Process("NOT GOOD: that did not work"); + Assert.Equal(2, _issues.Count); + Assert.Equal("it is broken", _issues[0].Item1.Message); + Assert.Equal("that did not work", _issues[1].Item1.Message); + Assert.Equal(0, _commands.Count); + Assert.Equal(0, _messages.Count); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LineColumn() + { + var matchers = new IssueMatchersConfig + { + Matchers = + { + new IssueMatcherConfig + { + Owner = "my-matcher-1", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = @"\((.+),(.+)\): (.+)", + Line = 1, + Column = 2, + Message = 3, + }, + }, + }, + }, + }; + using (Setup(matchers: matchers)) + using (_outputManager) + { + Process("(12,34): real bad"); + Process("(12,thirty-four): it is broken"); + Process("(twelve,34): not working"); + Assert.Equal(3, _issues.Count); + Assert.Equal("real bad", _issues[0].Item1.Message); + Assert.Equal("12", _issues[0].Item1.Data["line"]); + Assert.Equal("34", _issues[0].Item1.Data["col"]); + Assert.Equal("it is broken", _issues[1].Item1.Message); + Assert.Equal("12", _issues[1].Item1.Data["line"]); + Assert.False(_issues[1].Item1.Data.ContainsKey("col")); + Assert.Equal("not working", _issues[2].Item1.Message); + Assert.False(_issues[2].Item1.Data.ContainsKey("line")); + Assert.Equal("34", _issues[2].Item1.Data["col"]); + Assert.Equal(0, _commands.Count); + Assert.Equal(2, _messages.Count); + Assert.Equal("##[debug]Unable to parse column number 'thirty-four'", _messages[0]); + Assert.Equal("##[debug]Unable to parse line number 'twelve'", _messages[1]); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void ProcessCommand() + { + using (Setup()) + using (_outputManager) + { + Add(new IssueMatcherConfig + { + Owner = "my-matcher", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "ERROR: (.+)", + Message = 1, + }, + }, + }); + Process("this line is an ERROR: it is broken"); + Process("##[some-command]this line is a command even though it contains ERROR: not working"); + Process("this line is a command too ##[some-command]even though it contains ERROR: not working again"); + Process("##[not-command]this line is an ERROR: it is broken again"); + Assert.Equal(2, _issues.Count); + Assert.Equal("it is broken", _issues[0].Item1.Message); + Assert.Equal("it is broken again", _issues[1].Item1.Message); + Assert.Equal(2, _commands.Count); + Assert.Equal("##[some-command]this line is a command even though it contains ERROR: not working", _commands[0]); + Assert.Equal("this line is a command too ##[some-command]even though it contains ERROR: not working again", _commands[1]); + Assert.Equal(0, _messages.Count); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void RemoveColorCodes() + { + using (Setup()) + using (_outputManager) + { + Add(new IssueMatcherConfig + { + Owner = "my-matcher", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "^the error: (.+)$", + Message = 1, + }, + }, + }); + Process("the error: 
\033[31mred, \033[1;31mbright red, \033[mreset"); + Assert.Equal(1, _issues.Count); + Assert.Equal("red, bright red, reset", _issues[0].Item1.Message); + Assert.Equal("the error: red, bright red, reset", _issues[0].Item2); + Assert.Equal(0, _commands.Count); + Assert.Equal(0, _messages.Count); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void RemoveMatcher() + { + var matchers = new IssueMatchersConfig + { + Matchers = + { + new IssueMatcherConfig + { + Owner = "my-matcher-1", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "ERROR: (.+)", + Message = 1, + }, + }, + }, + new IssueMatcherConfig + { + Owner = "my-matcher-2", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "NOT GOOD: (.+)", + Message = 1, + }, + }, + }, + }, + }; + using (Setup(matchers: matchers)) + using (_outputManager) + { + Process("ERROR: message 1"); + Process("NOT GOOD: message 2"); + Remove("my-matcher-1"); + Process("ERROR: message 3"); + Process("NOT GOOD: message 4"); + Assert.Equal(3, _issues.Count); + Assert.Equal("message 1", _issues[0].Item1.Message); + Assert.Equal("message 2", _issues[1].Item1.Message); + Assert.Equal("message 4", _issues[2].Item1.Message); + Assert.Equal(0, _commands.Count); + Assert.Equal(1, _messages.Count); + Assert.Equal("ERROR: message 3", _messages[0]); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void ResetsOtherMatchers() + { + var matchers = new IssueMatchersConfig + { + Matchers = + { + new IssueMatcherConfig + { + Owner = "my-matcher-1", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "Matches both line 1: .+", + }, + new IssuePatternConfig + { + Pattern = "Matches 1 only line 2: (.+)", + Message = 1, + }, + }, + }, + new IssueMatcherConfig + { + Owner = "my-matcher-2", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "Matches both line 1: (.+)", + }, + new IssuePatternConfig + { + Pattern = "(.+)", + Message = 1, + }, + }, + }, + }, + }; + using (Setup(matchers: matchers)) + using (_outputManager) + { + Process("Matches both line 1: hello"); + Process("Matches 1 only line 2: it broke"); + Process("regular message 1"); + Process("regular message 2"); + Process("Matches both line 1: hello again"); + Process("oh no, another error"); + Assert.Equal(2, _issues.Count); + Assert.Equal("it broke", _issues[0].Item1.Message); + Assert.Equal("oh no, another error", _issues[1].Item1.Message); + Assert.Equal(0, _commands.Count); + Assert.Equal(4, _messages.Count); + Assert.Equal("Matches both line 1: hello", _messages[0]); + Assert.Equal("regular message 1", _messages[1]); + Assert.Equal("regular message 2", _messages[2]); + Assert.Equal("Matches both line 1: hello again", _messages[3]); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Severity() + { + var matchers = new IssueMatchersConfig + { + Matchers = + { + new IssueMatcherConfig + { + Owner = "my-matcher-1", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "(.*): (.+)", + Severity = 1, + Message = 2, + }, + }, + }, + new IssueMatcherConfig + { + Owner = "my-matcher-2", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = "ERROR! (.+)", + Message = 1, + }, + }, + }, + }, + }; + using (Setup(matchers: matchers)) + using (_outputManager) + { + Process("ERRor: real bad"); + Process("WARNing: not great"); + Process("info: hey"); + Process(": not working"); + Process("ERROR! 
uh oh"); + Assert.Equal(4, _issues.Count); + Assert.Equal("real bad", _issues[0].Item1.Message); + Assert.Equal(DTWebApi.IssueType.Error, _issues[0].Item1.Type); + Assert.Equal("not great", _issues[1].Item1.Message); + Assert.Equal(DTWebApi.IssueType.Warning, _issues[1].Item1.Type); + Assert.Equal("not working", _issues[2].Item1.Message); + Assert.Equal(DTWebApi.IssueType.Error, _issues[2].Item1.Type); + Assert.Equal("uh oh", _issues[3].Item1.Message); + Assert.Equal(DTWebApi.IssueType.Error, _issues[3].Item1.Type); + Assert.Equal(0, _commands.Count); + Assert.Equal(2, _messages.Count); + Assert.StartsWith("##[debug]Skipped", _messages[0]); + Assert.Contains("'info'", _messages[0]); + Assert.Equal("info: hey", _messages[1]); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Timeout() + { + Environment.SetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_ISSUE_MATCHER_TIMEOUT", "0:0:0.01"); + var matchers = new IssueMatchersConfig + { + Matchers = + { + new IssueMatcherConfig + { + Owner = "email", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = @"^((([a-z]|\d|[!#\$%&'\*\+\-\/=\?\^_`{\|}~]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])+(\.([a-z]|\d|[!#\$%&'\*\+\-\/=\?\^_`{\|}~]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])+)*)|((\x22)((((\x20|\x09)*(\x0d\x0a))?(\x20|\x09)+)?(([\x01-\x08\x0b\x0c\x0e-\x1f\x7f]|\x21|[\x23-\x5b]|[\x5d-\x7e]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(\\([\x01-\x09\x0b\x0c\x0d-\x7f]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]))))*(((\x20|\x09)*(\x0d\x0a))?(\x20|\x09)+)?(\x22)))@((([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|\d|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.)+(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])|(([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])([a-z]|\d|-|\.|_|~|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])*([a-z]|[\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF])))\.?$", + Message = 0, + }, + }, + }, + new IssueMatcherConfig + { + Owner = "err", + Patterns = new[] + { + new IssuePatternConfig + { + Pattern = @"ERR: (.+)", + Message = 1, + }, + }, + }, + }, + }; + using (Setup(matchers: matchers)) + using (_outputManager) + { + Process("john.doe@contoso.com"); + Process("t@t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.c%20"); + Process("jane.doe@contoso.com"); + Process("ERR: this error"); + Assert.Equal(3, _issues.Count); + Assert.Equal("john.doe@contoso.com", _issues[0].Item1.Message); + Assert.Contains("Removing issue matcher 'email'", _issues[1].Item1.Message); + Assert.Equal("this error", _issues[2].Item1.Message); + Assert.Equal(0, _commands.Count); + Assert.Equal(2, _messages.Where(x => x.StartsWith("##[debug]Timeout processing issue matcher")).Count()); + Assert.Equal(1, _messages.Where(x => x.Equals("t@t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.t.c%20")).Count()); + Assert.Equal(1, _messages.Where(x => x.StartsWith("jane.doe@contoso.com")).Count()); + } + } + + // todo: roots file against fromPath + // todo: roots file against system.defaultWorkingDirectory + // todo: matches repository + // todo: checks file exists + + private TestHostContext Setup( + [CallerMemberName] string name = "", + IssueMatchersConfig matchers = null) + { + matchers?.Validate(); + + _onMatcherChanged = null; + _issues = new List>(); + _messages = new List(); + _commands = new List(); + + var 
hostContext = new TestHostContext(this, name); + + _variables = new Variables(hostContext, new Dictionary()); + + _executionContext = new Mock(); + _executionContext.Setup(x => x.WriteDebug) + .Returns(true); + _executionContext.Setup(x => x.Variables) + .Returns(_variables); + _executionContext.Setup(x => x.GetMatchers()) + .Returns(matchers?.Matchers ?? new List()); + _executionContext.Setup(x => x.Add(It.IsAny())) + .Callback((OnMatcherChanged handler) => + { + _onMatcherChanged = handler; + }); + _executionContext.Setup(x => x.AddIssue(It.IsAny(), It.IsAny())) + .Callback((DTWebApi.Issue issue, string logMessage) => + { + _issues.Add(new Tuple(issue, logMessage)); + }); + _executionContext.Setup(x => x.Write(It.IsAny(), It.IsAny())) + .Callback((string tag, string message) => + { + _messages.Add($"{tag}{message}"); + hostContext.GetTrace().Info($"{tag}{message}"); + }); + + _commandManager = new Mock(); + _commandManager.Setup(x => x.TryProcessCommand(It.IsAny(), It.IsAny())) + .Returns((IExecutionContext executionContext, string line) => + { + if (line.IndexOf("##[some-command]") >= 0) + { + _commands.Add(line); + return true; + } + + return false; + }); + + _outputManager = new OutputManager(_executionContext.Object, _commandManager.Object); + return hostContext; + } + + private void Add(IssueMatcherConfig matcher) + { + var matchers = new IssueMatchersConfig + { + Matchers = + { + matcher, + }, + }; + matchers.Validate(); + _onMatcherChanged(null, new MatcherChangedEventArgs(matcher)); + } + + private void Remove(string owner) + { + var matcher = new IssueMatcherConfig { Owner = owner }; + _onMatcherChanged(null, new MatcherChangedEventArgs(matcher)); + } + + private void Process(string line) + { + _outputManager.OnDataReceived(null, new ProcessDataReceivedEventArgs(line)); + } + } +} diff --git a/src/Test/L0/Worker/PipelineDirectoryManagerL0.cs b/src/Test/L0/Worker/PipelineDirectoryManagerL0.cs new file mode 100644 index 00000000000..de0ac5be6ca --- /dev/null +++ b/src/Test/L0/Worker/PipelineDirectoryManagerL0.cs @@ -0,0 +1,229 @@ +using Pipelines = GitHub.DistributedTask.Pipelines; +using GitHub.Runner.Worker; +using Moq; +using System.IO; +using System.Runtime.CompilerServices; +using Xunit; +using System; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class PipelineDirectoryManagerL0 + { + private PipelineDirectoryManager _pipelineDirectoryManager; + private Mock _ec; + private Pipelines.WorkspaceOptions _workspaceOptions; + private TrackingConfig _existingConfig; + private TrackingConfig _newConfig; + private string _trackingFile; + private Mock _trackingManager; + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void CreatesPipelineDirectories() + { + // Arrange. + using (TestHostContext hc = Setup()) + { + _trackingManager.Setup(x => x.LoadIfExists(_ec.Object, _trackingFile)).Returns(default(TrackingConfig)); + _trackingManager.Setup(x => x.Create(_ec.Object, _trackingFile)).Returns(new TrackingConfig(_ec.Object)); + + // Act. + _newConfig = _pipelineDirectoryManager.PrepareDirectory(_ec.Object, _workspaceOptions); + + // Assert. + _trackingManager.Verify(x => x.Create(_ec.Object, _trackingFile)); + Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Work), _newConfig.WorkspaceDirectory))); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void DeletesResourceDirectoryWhenCleanIsResources() + { + // Arrange. 
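// This test and the two that follow pin down how PrepareDirectory honors
// Pipelines.PipelineConstants.WorkspaceCleanOptions in this patch:
//   Resources - keep the workspace directory but delete everything inside it,
//   Outputs   - delete non-resource directories under the pipeline directory,
//   All       - recreate the pipeline directory so only a fresh workspace directory remains.
// A minimal sketch of driving one mode, reusing the fields this fixture already sets up:
//
//   _workspaceOptions.Clean = Pipelines.PipelineConstants.WorkspaceCleanOptions.Resources;
//   _newConfig = _pipelineDirectoryManager.PrepareDirectory(_ec.Object, _workspaceOptions);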
+ using (TestHostContext hc = Setup()) + { + _existingConfig = new TrackingConfig(_ec.Object); + _trackingManager.Setup(x => x.LoadIfExists(_ec.Object, _trackingFile)).Returns(_existingConfig); + + _workspaceOptions.Clean = Pipelines.PipelineConstants.WorkspaceCleanOptions.Resources; + string workspaceDirectory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Work), _existingConfig.WorkspaceDirectory); + string sourceFile = Path.Combine(workspaceDirectory, "some subdirectory", "some source file"); + Directory.CreateDirectory(Path.GetDirectoryName(sourceFile)); + File.WriteAllText(path: sourceFile, contents: "some source contents"); + + // Act. + _pipelineDirectoryManager.PrepareDirectory(_ec.Object, _workspaceOptions); + + // Assert. + Assert.True(Directory.Exists(workspaceDirectory)); + Assert.Equal(0, Directory.GetFileSystemEntries(workspaceDirectory, "*", SearchOption.AllDirectories).Length); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void DeletesNonResourceDirectoryWhenCleanIsOutputs() + { + // Arrange. + using (TestHostContext hc = Setup()) + { + _existingConfig = new TrackingConfig(_ec.Object); + _trackingManager.Setup(x => x.LoadIfExists(_ec.Object, _trackingFile)).Returns(_existingConfig); + + _workspaceOptions.Clean = Pipelines.PipelineConstants.WorkspaceCleanOptions.Outputs; + string nonResourceDirectory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Work), _existingConfig.PipelineDirectory, "somedir"); + string sourceFile = Path.Combine(nonResourceDirectory, "some subdirectory", "some source file"); + Directory.CreateDirectory(Path.GetDirectoryName(sourceFile)); + File.WriteAllText(path: sourceFile, contents: "some source contents"); + + // Act. + _pipelineDirectoryManager.PrepareDirectory(_ec.Object, _workspaceOptions); + + // Assert. + Assert.False(Directory.Exists(nonResourceDirectory)); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void RecreatesPipelinesDirectoryWhenCleanIsAll() + { + // Arrange. + using (TestHostContext hc = Setup()) + { + _existingConfig = new TrackingConfig(_ec.Object); + _trackingManager.Setup(x => x.LoadIfExists(_ec.Object, _trackingFile)).Returns(_existingConfig); + + _workspaceOptions.Clean = Pipelines.PipelineConstants.WorkspaceCleanOptions.All; + + string pipelinesDirectory = Path.Combine(hc.GetDirectory(WellKnownDirectory.Work), _existingConfig.PipelineDirectory); + string looseFile = Path.Combine(pipelinesDirectory, "some loose directory", "some loose file"); + Directory.CreateDirectory(Path.GetDirectoryName(looseFile)); + File.WriteAllText(path: looseFile, contents: "some loose file contents"); + + // Act. + _pipelineDirectoryManager.PrepareDirectory(_ec.Object, _workspaceOptions); + + // Assert. + Assert.Equal(1, Directory.GetFileSystemEntries(pipelinesDirectory, "*", SearchOption.AllDirectories).Length); + Assert.True(Directory.Exists(Path.Combine(hc.GetDirectory(WellKnownDirectory.Work), _existingConfig.WorkspaceDirectory))); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void UpdatesExistingConfig() + { + // Arrange. + using (TestHostContext hc = Setup()) + { + _existingConfig = new TrackingConfig(_ec.Object); + _trackingManager.Setup(x => x.LoadIfExists(_ec.Object, _trackingFile)).Returns(_existingConfig); + + // Act. + _pipelineDirectoryManager.PrepareDirectory(_ec.Object, _workspaceOptions); + + // Assert. 
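// Together with CreatesPipelineDirectories above, the verifies below assume the following
// call pattern inside PrepareDirectory (a rough sketch, inferred from the mock setups
// rather than taken from the implementation):
//
//   var config = trackingManager.LoadIfExists(executionContext, trackingFile);
//   if (config == null) { config = trackingManager.Create(executionContext, trackingFile); }
//   else                { trackingManager.Update(executionContext, config, trackingFile); }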
+ _trackingManager.Verify(x => x.LoadIfExists(_ec.Object, _trackingFile)); + _trackingManager.Verify(x => x.Update(_ec.Object, _existingConfig, _trackingFile)); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void UpdatesRepositoryDirectoryWorkspaceRepo() + { + // Arrange. + using (TestHostContext hc = Setup()) + { + _existingConfig = new TrackingConfig(_ec.Object); + _trackingManager.Setup(x => x.LoadIfExists(_ec.Object, _trackingFile)).Returns(_existingConfig); + + // Act. + _pipelineDirectoryManager.UpdateRepositoryDirectory(_ec.Object, "actions/runner", Path.Combine(hc.GetDirectory(WellKnownDirectory.Work), _existingConfig.PipelineDirectory, "my_new_path"), true); + + // Assert. + _trackingManager.Verify(x => x.LoadIfExists(_ec.Object, _trackingFile)); + _trackingManager.Verify(x => x.Update(_ec.Object, _existingConfig, _trackingFile)); + _ec.Verify(x => x.SetGitHubContext("workspace", Path.Combine(hc.GetDirectory(WellKnownDirectory.Work), _existingConfig.PipelineDirectory, "my_new_path"))); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void UpdatesRepositoryDirectoryNoneWorkspaceRepo() + { + // Arrange. + using (TestHostContext hc = Setup()) + { + _existingConfig = new TrackingConfig(_ec.Object); + _trackingManager.Setup(x => x.LoadIfExists(_ec.Object, _trackingFile)).Returns(_existingConfig); + + // Act. + _pipelineDirectoryManager.UpdateRepositoryDirectory(_ec.Object, "actions/notrunner", Path.Combine(hc.GetDirectory(WellKnownDirectory.Work), _existingConfig.PipelineDirectory, "notrunner"), false); + + // Assert. + _trackingManager.Verify(x => x.LoadIfExists(_ec.Object, _trackingFile)); + _trackingManager.Verify(x => x.Update(_ec.Object, _existingConfig, _trackingFile)); + _ec.Verify(x => x.SetGitHubContext("workspace", It.IsAny()), Times.Never); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void UpdatesRepositoryDirectoryThrowOnInvalidPath() + { + // Arrange. + using (TestHostContext hc = Setup()) + { + _existingConfig = new TrackingConfig(_ec.Object); + _trackingManager.Setup(x => x.LoadIfExists(_ec.Object, _trackingFile)).Returns(_existingConfig); + + // Act. + Assert.ThrowsAny(()=> _pipelineDirectoryManager.UpdateRepositoryDirectory(_ec.Object, "actions/notrunner", Path.Combine(hc.GetDirectory(WellKnownDirectory.Work), "not_under_pipeline_directory"), false)); + } + } + + + private TestHostContext Setup( + [CallerMemberName] string name = "") + { + // Setup the host context. + TestHostContext hc = new TestHostContext(this, name); + + // Setup the execution context. + _ec = new Mock(); + + GitHubContext githubContext = new GitHubContext(); + _ec.Setup(x => x.GetGitHubContext("repository")).Returns("actions/runner"); + + // Store the expected tracking file path. + _trackingFile = Path.Combine( + hc.GetDirectory(WellKnownDirectory.Work), + Constants.Pipeline.Path.PipelineMappingDirectory, + "actions/runner", + Constants.Pipeline.Path.TrackingConfigFile); + + _workspaceOptions = new Pipelines.WorkspaceOptions(); + + // Setup the tracking manager. + _trackingManager = new Mock(); + hc.SetSingleton(_trackingManager.Object); + + // Setup the build directory manager. 
+ _pipelineDirectoryManager = new PipelineDirectoryManager(); + _pipelineDirectoryManager.Initialize(hc); + return hc; + } + } +} diff --git a/src/Test/L0/Worker/StepsRunnerL0.cs b/src/Test/L0/Worker/StepsRunnerL0.cs new file mode 100644 index 00000000000..071625c0464 --- /dev/null +++ b/src/Test/L0/Worker/StepsRunnerL0.cs @@ -0,0 +1,448 @@ +// using GitHub.DistributedTask.WebApi; +// using GitHub.Runner.Worker; +// using Moq; +// using System; +// using System.Collections.Generic; +// using System.Globalization; +// using System.Linq; +// using System.Runtime.CompilerServices; +// using System.Threading.Tasks; +// using Xunit; +// using GitHub.DistributedTask.Expressions2; +// using GitHub.DistributedTask.Pipelines.ContextData; + +// namespace GitHub.Runner.Common.Tests.Worker +// { +// public sealed class StepsRunnerL0 +// { +// private Mock _ec; +// private StepsRunner _stepsRunner; +// private Variables _variables; +// private Dictionary _contexts; +// private TestHostContext CreateTestContext([CallerMemberName] String testName = "") +// { +// var hc = new TestHostContext(this, testName); +// var expressionManager = new ExpressionManager(); +// expressionManager.Initialize(hc); +// hc.SetSingleton(expressionManager); +// Dictionary variablesToCopy = new Dictionary(); +// variablesToCopy.Add(Constants.Variables.Agent.RetainDefaultEncoding, new VariableValue("true", false)); +// _variables = new Variables( +// hostContext: hc, +// copy: variablesToCopy); +// _ec = new Mock(); +// _ec.SetupAllProperties(); +// _ec.Setup(x => x.Variables).Returns(_variables); + +// _contexts = new Dictionary(); +// _contexts["github"] = new DictionaryContextData(); +// _contexts["runner"] = new DictionaryContextData(); +// _contexts["actions"] = new DictionaryContextData(); +// _ec.Setup(x => x.ExpressionValues).Returns(_contexts); + +// var _stepContext = new StepsContext(); +// _ec.Setup(x => x.StepsContext).Returns(_stepContext); +// _stepsRunner = new StepsRunner(); +// _stepsRunner.Initialize(hc); +// return hc; +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public async Task RunNormalStepsAllStepPass() +// { +// using (TestHostContext hc = CreateTestContext()) +// { +// // Arrange. +// var variableSets = new[] +// { +// new[] { CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded) }, +// new[] { CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.SucceededOrFailed) }, +// new[] { CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Always) } +// }; +// foreach (var variableSet in variableSets) +// { +// _ec.Object.Result = null; + +// // Act. +// await _stepsRunner.RunAsync( +// jobContext: _ec.Object, +// steps: variableSet.Select(x => x.Object).ToList()); + +// // Assert. +// Assert.Equal(TaskResult.Succeeded, _ec.Object.Result ?? TaskResult.Succeeded); +// Assert.Equal(2, variableSet.Length); +// variableSet[0].Verify(x => x.RunAsync()); +// variableSet[1].Verify(x => x.RunAsync()); +// } +// } +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public async Task RunNormalStepsContinueOnError() +// { +// using (TestHostContext hc = CreateTestContext()) +// { +// // Arrange. 
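// The (currently disabled) cases below capture the continue-on-error contract these mocks
// assume: a failing step with ContinueOnError = true does not stop the run, later steps
// still execute, and the job result is downgraded to SucceededWithIssues instead of Failed.
// A minimal sketch using the CreateStep helper defined further down in this file:
//
//   var steps = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded, continueOnError: true),
//                       CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded) };
//   await _stepsRunner.RunAsync(jobContext: _ec.Object, steps: steps.Select(x => x.Object).ToList());
//   // expected: _ec.Object.Result == TaskResult.SucceededWithIssues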
+// var variableSets = new[] +// { +// new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded, true), CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded) }, +// new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded, true), CreateStep(TaskResult.Succeeded, ExpressionManager.SucceededOrFailed) }, +// new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded, true), CreateStep(TaskResult.Succeeded, ExpressionManager.Always) }, +// new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded, true), CreateStep(TaskResult.Failed, ExpressionManager.Succeeded, true) }, +// new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded, true), CreateStep(TaskResult.Failed, ExpressionManager.SucceededOrFailed, true) }, +// new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded, true), CreateStep(TaskResult.Failed, ExpressionManager.Always, true) } +// }; +// foreach (var variableSet in variableSets) +// { +// _ec.Object.Result = null; + +// // Act. +// await _stepsRunner.RunAsync( +// jobContext: _ec.Object, +// steps: variableSet.Select(x => x.Object).ToList()); + +// // Assert. +// Assert.Equal(TaskResult.SucceededWithIssues, _ec.Object.Result); +// Assert.Equal(2, variableSet.Length); +// variableSet[0].Verify(x => x.RunAsync()); +// variableSet[1].Verify(x => x.RunAsync()); +// } +// } +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public async Task RunsAfterFailureBasedOnCondition() +// { +// using (TestHostContext hc = CreateTestContext()) +// { +// // Arrange. +// var variableSets = new[] +// { +// new +// { +// Steps = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded) }, +// Expected = false, +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.SucceededOrFailed) }, +// Expected = true, +// }, +// }; +// foreach (var variableSet in variableSets) +// { +// _ec.Object.Result = null; + +// // Act. +// await _stepsRunner.RunAsync( +// jobContext: _ec.Object, +// steps: variableSet.Steps.Select(x => x.Object).ToList()); + +// // Assert. +// Assert.Equal(TaskResult.Failed, _ec.Object.Result ?? TaskResult.Succeeded); +// Assert.Equal(2, variableSet.Steps.Length); +// variableSet.Steps[0].Verify(x => x.RunAsync()); +// variableSet.Steps[1].Verify(x => x.RunAsync(), variableSet.Expected ? Times.Once() : Times.Never()); +// } +// } +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public async Task RunsAlwaysSteps() +// { +// using (TestHostContext hc = CreateTestContext()) +// { +// // Arrange. 
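// Condition semantics exercised here and in AlwaysMeansAlways below: a step gated on
// ExpressionManager.Succeeded is skipped once an earlier step has failed,
// SucceededOrFailed still runs after a failure, and Always runs no matter what the
// previous result was (SucceededWithIssues, Failed, even Canceled), while the job result
// continues to reflect the worst outcome of any step that did not set ContinueOnError.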
+// var variableSets = new[] +// { +// new +// { +// Steps = new[] { CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Always) }, +// Expected = TaskResult.Succeeded, +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Always) }, +// Expected = TaskResult.Failed, +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Always) }, +// Expected = TaskResult.Failed, +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded), CreateStep(TaskResult.Failed, ExpressionManager.Always) }, +// Expected = TaskResult.Failed, +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded), CreateStep(TaskResult.Failed, ExpressionManager.Always, true) }, +// Expected = TaskResult.SucceededWithIssues, +// }, +// }; +// foreach (var variableSet in variableSets) +// { +// _ec.Object.Result = null; + +// // Act. +// await _stepsRunner.RunAsync( +// jobContext: _ec.Object, +// steps: variableSet.Steps.Select(x => x.Object).ToList()); + +// // Assert. +// Assert.Equal(variableSet.Expected, _ec.Object.Result ?? TaskResult.Succeeded); +// Assert.Equal(2, variableSet.Steps.Length); +// variableSet.Steps[0].Verify(x => x.RunAsync()); +// variableSet.Steps[1].Verify(x => x.RunAsync()); +// } +// } +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public async Task SetsJobResultCorrectly() +// { +// using (TestHostContext hc = CreateTestContext()) +// { +// // Arrange. +// var variableSets = new[] +// { +// new +// { +// Steps = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded) }, +// Expected = TaskResult.Failed +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.SucceededOrFailed) }, +// Expected = TaskResult.Failed +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Always) }, +// Expected = TaskResult.Failed +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded, continueOnError: true), CreateStep(TaskResult.Failed, ExpressionManager.Succeeded) }, +// Expected = TaskResult.Failed +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded, continueOnError: true), CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded) }, +// Expected = TaskResult.SucceededWithIssues +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded, continueOnError: true), CreateStep(TaskResult.Failed, ExpressionManager.Succeeded, continueOnError: true) }, +// Expected = TaskResult.SucceededWithIssues +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Succeeded, ExpressionManager.SucceededOrFailed) }, +// Expected = TaskResult.Succeeded +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded), CreateStep(TaskResult.Failed, ExpressionManager.Succeeded) }, +// Expected = TaskResult.Failed +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded), 
CreateStep(TaskResult.SucceededWithIssues, ExpressionManager.Succeeded) }, +// Expected = TaskResult.SucceededWithIssues +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.SucceededWithIssues, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded) }, +// Expected = TaskResult.SucceededWithIssues +// }, +// new +// { +// Steps = new[] { CreateStep(TaskResult.SucceededWithIssues, ExpressionManager.Succeeded), CreateStep(TaskResult.Failed, ExpressionManager.Succeeded) }, +// Expected = TaskResult.Failed +// }, +// // Abandoned +// // Canceled +// // Failed +// // Skipped +// // Succeeded +// // SucceededWithIssues +// }; +// foreach (var variableSet in variableSets) +// { +// _ec.Object.Result = null; + +// // Act. +// await _stepsRunner.RunAsync( +// jobContext: _ec.Object, +// steps: variableSet.Steps.Select(x => x.Object).ToList()); + +// // Assert. +// Assert.True( +// variableSet.Expected == (_ec.Object.Result ?? TaskResult.Succeeded), +// $"Expected '{variableSet.Expected}'. Actual '{_ec.Object.Result}'. Steps: {FormatSteps(variableSet.Steps)}"); +// } +// } +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public async Task SkipsAfterFailureOnlyBaseOnCondition() +// { +// using (TestHostContext hc = CreateTestContext()) +// { +// // Arrange. +// var variableSets = new[] +// { +// new +// { +// Step = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded) }, +// Expected = false +// }, +// new +// { +// Step = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.SucceededOrFailed) }, +// Expected = true +// }, +// new +// { +// Step = new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Always) }, +// Expected = true +// } +// }; +// foreach (var variableSet in variableSets) +// { +// _ec.Object.Result = null; + +// // Act. +// await _stepsRunner.RunAsync( +// jobContext: _ec.Object, +// steps: variableSet.Step.Select(x => x.Object).ToList()); + +// // Assert. +// Assert.Equal(2, variableSet.Step.Length); +// variableSet.Step[0].Verify(x => x.RunAsync()); +// variableSet.Step[1].Verify(x => x.RunAsync(), variableSet.Expected ? Times.Once() : Times.Never()); +// } +// } +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public async Task AlwaysMeansAlways() +// { +// using (TestHostContext hc = CreateTestContext()) +// { +// // Arrange. +// var variableSets = new[] +// { +// new[] { CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Always) }, +// new[] { CreateStep(TaskResult.SucceededWithIssues, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Always) }, +// new[] { CreateStep(TaskResult.Failed, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Always) }, +// new[] { CreateStep(TaskResult.Canceled, ExpressionManager.Succeeded), CreateStep(TaskResult.Succeeded, ExpressionManager.Always) } +// }; +// foreach (var variableSet in variableSets) +// { +// _ec.Object.Result = null; + +// // Act. +// await _stepsRunner.RunAsync( +// jobContext: _ec.Object, +// steps: variableSet.Select(x => x.Object).ToList()); + +// // Assert. 
+// Assert.Equal(2, variableSet.Length); +// variableSet[0].Verify(x => x.RunAsync()); +// variableSet[1].Verify(x => x.RunAsync(), Times.Once()); +// } +// } +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public async Task TreatsConditionErrorAsFailure() +// { +// using (TestHostContext hc = CreateTestContext()) +// { +// var expressionManager = new Mock(); +// expressionManager.Object.Initialize(hc); +// hc.SetSingleton(expressionManager.Object); +// expressionManager.Setup(x => x.Evaluate(It.IsAny(), It.IsAny(), It.IsAny())).Throws(new Exception()); + +// // Arrange. +// var variableSets = new[] +// { +// new[] { CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded) }, +// new[] { CreateStep(TaskResult.Succeeded, ExpressionManager.Succeeded) }, +// }; +// foreach (var variableSet in variableSets) +// { +// _ec.Object.Result = null; + +// // Act. +// await _stepsRunner.RunAsync( +// jobContext: _ec.Object, +// steps: variableSet.Select(x => x.Object).ToList()); + +// // Assert. +// Assert.Equal(TaskResult.Failed, _ec.Object.Result ?? TaskResult.Succeeded); +// } +// } +// } + +// private Mock CreateStep(TaskResult result, IExpressionNode condition, Boolean continueOnError = false) +// { +// // Setup the step. +// var step = new Mock(); +// step.Setup(x => x.Condition).Returns(condition); +// step.Setup(x => x.ContinueOnError).Returns(continueOnError); +// step.Setup(x => x.Enabled).Returns(true); +// step.Setup(x => x.RunAsync()).Returns(Task.CompletedTask); + +// // Setup the step execution context. +// var stepContext = new Mock(); +// stepContext.SetupAllProperties(); +// stepContext.Setup(x => x.Variables).Returns(_variables); +// stepContext.Setup(x => x.ExpressionValues).Returns(_contexts); +// stepContext.Setup(x => x.Complete(It.IsAny(), It.IsAny(), It.IsAny())) +// .Callback((TaskResult? 
r, string currentOperation, string resultCode) => +// { +// if (r != null) +// { +// stepContext.Object.Result = r; +// } +// }); +// stepContext.Object.Result = result; +// step.Setup(x => x.ExecutionContext).Returns(stepContext.Object); + +// return step; +// } + +// private string FormatSteps(IEnumerable> steps) +// { +// return String.Join( +// " ; ", +// steps.Select(x => String.Format( +// CultureInfo.InvariantCulture, +// "Returns={0},Condition=[{1}],ContinueOnError={2},Enabled={3}", +// x.Object.ExecutionContext.Result, +// x.Object.Condition, +// x.Object.ContinueOnError, +// x.Object.Enabled))); +// } +// } +// } diff --git a/src/Test/L0/Worker/TaskCommandExtensionL0.cs b/src/Test/L0/Worker/TaskCommandExtensionL0.cs new file mode 100644 index 00000000000..e695fe09d91 --- /dev/null +++ b/src/Test/L0/Worker/TaskCommandExtensionL0.cs @@ -0,0 +1,181 @@ +// using System; +// using System.Collections.Generic; +// using System.Runtime.CompilerServices; +// using GitHub.DistributedTask.WebApi; +// using GitHub.Runner.Worker; +// using Moq; +// using Xunit; + +// namespace GitHub.Runner.Common.Tests.Worker +// { +// public sealed class TaskCommandExtensionL0 +// { +// private TestHostContext _hc; +// private Mock _ec; +// private ServiceEndpoint _endpoint; + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public void SetEndpointAuthParameter() +// { +// SetupMocks(); +// TaskCommandExtension commandExtension = new TaskCommandExtension(); +// commandExtension.Initialize(_hc); +// var cmd = new Command("task", "setEndpoint"); +// cmd.Data = "blah"; +// cmd.Properties.Add("field", "authParameter"); +// cmd.Properties.Add("id", Guid.Empty.ToString()); +// cmd.Properties.Add("key", "test"); + +// commandExtension.ProcessCommand(_ec.Object, cmd); + +// Assert.Equal(_endpoint.Authorization.Parameters["test"], "blah"); +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public void SetEndpointDataParameter() +// { +// SetupMocks(); +// TaskCommandExtension commandExtension = new TaskCommandExtension(); +// var cmd = new Command("task", "setEndpoint"); +// cmd.Data = "blah"; +// cmd.Properties.Add("field", "dataParameter"); +// cmd.Properties.Add("id", Guid.Empty.ToString()); +// cmd.Properties.Add("key", "test"); + +// commandExtension.ProcessCommand(_ec.Object, cmd); + +// Assert.Equal(_endpoint.Data["test"], "blah"); +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public void SetEndpointUrlParameter() +// { +// SetupMocks(); +// TaskCommandExtension commandExtension = new TaskCommandExtension(); +// var cmd = new Command("task", "setEndpoint"); +// cmd.Data = "http://blah/"; +// cmd.Properties.Add("field", "url"); +// cmd.Properties.Add("id", Guid.Empty.ToString()); + +// commandExtension.ProcessCommand(_ec.Object, cmd); + +// Assert.Equal(_endpoint.Url.ToString(), cmd.Data); +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public void SetEndpointWithoutValue() +// { +// SetupMocks(); +// TaskCommandExtension commandExtension = new TaskCommandExtension(); +// var cmd = new Command("task", "setEndpoint"); +// Assert.Throws(() => commandExtension.ProcessCommand(_ec.Object, cmd)); +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public void SetEndpointWithoutEndpointField() +// { +// SetupMocks(); +// TaskCommandExtension commandExtension = new TaskCommandExtension(); +// var cmd = new Command("task", "setEndpoint"); + +// 
Assert.Throws(() => commandExtension.ProcessCommand(_ec.Object, cmd)); +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public void SetEndpointInvalidEndpointField() +// { +// SetupMocks(); +// TaskCommandExtension commandExtension = new TaskCommandExtension(); +// var cmd = new Command("task", "setEndpoint"); +// cmd.Properties.Add("field", "blah"); + +// Assert.Throws(() => commandExtension.ProcessCommand(_ec.Object, cmd)); +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public void SetEndpointWithoutEndpointId() +// { +// SetupMocks(); +// TaskCommandExtension commandExtension = new TaskCommandExtension(); +// var cmd = new Command("task", "setEndpoint"); +// cmd.Properties.Add("field", "url"); + +// Assert.Throws(() => commandExtension.ProcessCommand(_ec.Object, cmd)); +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public void SetEndpointInvalidEndpointId() +// { +// SetupMocks(); +// TaskCommandExtension commandExtension = new TaskCommandExtension(); +// var cmd = new Command("task", "setEndpoint"); +// cmd.Properties.Add("field", "url"); +// cmd.Properties.Add("id", "blah"); + +// Assert.Throws(() => commandExtension.ProcessCommand(_ec.Object, cmd)); +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public void SetEndpointIdWithoutEndpointKey() +// { +// SetupMocks(); +// TaskCommandExtension commandExtension = new TaskCommandExtension(); +// var cmd = new Command("task", "setEndpoint"); +// cmd.Properties.Add("field", "authParameter"); +// cmd.Properties.Add("id", Guid.Empty.ToString()); + +// Assert.Throws(() => commandExtension.ProcessCommand(_ec.Object, cmd)); +// } + +// [Fact] +// [Trait("Level", "L0")] +// [Trait("Category", "Worker")] +// public void SetEndpointUrlWithInvalidValue() +// { +// SetupMocks(); +// TaskCommandExtension commandExtension = new TaskCommandExtension(); +// var cmd = new Command("task", "setEndpoint"); +// cmd.Data = "blah"; +// cmd.Properties.Add("field", "url"); +// cmd.Properties.Add("id", Guid.Empty.ToString()); + +// Assert.Throws(() => commandExtension.ProcessCommand(_ec.Object, cmd)); +// } + +// private void SetupMocks([CallerMemberName] string name = "") +// { +// _hc = new TestHostContext(this, name); +// _ec = new Mock(); + +// _endpoint = new ServiceEndpoint() +// { +// Id = Guid.Empty, +// Url = new Uri("https://test.com"), +// Authorization = new EndpointAuthorization() +// { +// Scheme = "Test", +// } +// }; + +// _ec.Setup(x => x.Endpoints).Returns(new List { _endpoint }); +// } +// } +// } diff --git a/src/Test/L0/Worker/TrackingManagerL0.cs b/src/Test/L0/Worker/TrackingManagerL0.cs new file mode 100644 index 00000000000..8b2c1bffc46 --- /dev/null +++ b/src/Test/L0/Worker/TrackingManagerL0.cs @@ -0,0 +1,129 @@ +using GitHub.Runner.Worker; +using Moq; +using System; +using System.IO; +using System.Runtime.CompilerServices; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class TrackingManagerL0 + { + private Mock _ec; + private TrackingManager _trackingManager; + private string _workFolder; + + public TestHostContext Setup([CallerMemberName] string name = "") + { + // Setup the host context. + TestHostContext hc = new TestHostContext(this, name); + + // Create a random work path. + _workFolder = hc.GetDirectory(WellKnownDirectory.Work); + + // Setup the execution context. 
+ _ec = new Mock(); + GitHubContext githubContext = new GitHubContext(); + _ec.Setup(x => x.GetGitHubContext("repository")).Returns("actions/runner"); + + // Setup the tracking manager. + _trackingManager = new TrackingManager(); + _trackingManager.Initialize(hc); + + return hc; + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void CreatesTrackingConfig() + { + using (TestHostContext hc = Setup()) + { + // Arrange. + string trackingFile = Path.Combine(_workFolder, "trackingconfig.json"); + DateTimeOffset testStartOn = DateTimeOffset.Now; + + // Act. + _trackingManager.Create(_ec.Object, trackingFile); + + // Assert. + TrackingConfig config = _trackingManager.LoadIfExists(_ec.Object, trackingFile); + Assert.Equal("runner", config.PipelineDirectory); + Assert.Equal($"runner{Path.DirectorySeparatorChar}runner", config.WorkspaceDirectory); + Assert.Equal("actions/runner", config.RepositoryName); + + Assert.Equal(1, config.Repositories.Count); + Assert.Equal($"runner{Path.DirectorySeparatorChar}runner", config.Repositories["actions/runner"].RepositoryPath); + + // Manipulate the expected seconds due to loss of granularity when the + // date-time-offset is serialized in a friendly format. + Assert.True(testStartOn.AddSeconds(-1) <= config.LastRunOn); + Assert.True(DateTimeOffset.Now.AddSeconds(1) >= config.LastRunOn); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsTrackingConfig() + { + using (TestHostContext hc = Setup()) + { + // Arrange. + Directory.CreateDirectory(_workFolder); + string filePath = Path.Combine(_workFolder, "trackingconfig.json"); + _trackingManager.Create(_ec.Object, filePath); + + // Act. + TrackingConfig config = _trackingManager.LoadIfExists(_ec.Object, filePath); + + // Assert. + Assert.NotNull(config); + Assert.Equal("actions/runner", config.RepositoryName); + Assert.Equal("runner", config.PipelineDirectory); + Assert.Equal($"runner{Path.DirectorySeparatorChar}runner", config.WorkspaceDirectory); + Assert.Equal(1, config.Repositories.Count); + Assert.Equal($"runner{Path.DirectorySeparatorChar}runner", config.Repositories["actions/runner"].RepositoryPath); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void LoadsTrackingConfig_NotExists() + { + using (TestHostContext hc = Setup()) + { + // Act. + TrackingConfig config = _trackingManager.LoadIfExists( + _ec.Object, + Path.Combine(_workFolder, "foo.json")); + + // Assert. + Assert.Null(config); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void UpdatesTrackingConfigJobRunProperties() + { + using (TestHostContext hc = Setup()) + { + // Arrange. + TrackingConfig config = new TrackingConfig() { RepositoryName = "actions/runner" }; + string trackingFile = Path.Combine(_workFolder, "trackingconfig.json"); + + // Act. + _trackingManager.Update(_ec.Object, config, trackingFile); + + // Assert. 
+ config = _trackingManager.LoadIfExists(_ec.Object, trackingFile); + Assert.NotNull(config); + Assert.Equal("actions/runner", config.RepositoryName); + } + } + } +} diff --git a/src/Test/L0/Worker/VariablesL0.cs b/src/Test/L0/Worker/VariablesL0.cs new file mode 100644 index 00000000000..8f41f10bf6c --- /dev/null +++ b/src/Test/L0/Worker/VariablesL0.cs @@ -0,0 +1,193 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.Runner.Common.Util; +using GitHub.Runner.Sdk; +using GitHub.Runner.Worker; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using Xunit; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class VariablesL0 + { + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Constructor_AppliesMaskHints() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + var copy = new Dictionary<string, VariableValue> + { + { "MySecretName", new VariableValue("My secret value", true) }, + { "MyPublicVariable", "My public value" }, + }; + var variables = new Variables(hc, copy); + + // Assert. + Assert.Equal(2, variables.AllVariables.Count()); + Assert.Equal("My public value", variables.Get("MyPublicVariable")); + Assert.Equal("My secret value", variables.Get("MySecretName")); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Constructor_HandlesNullValue() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + var copy = new Dictionary<string, VariableValue> + { + { "variable1", new VariableValue(null, false) }, + { "variable2", "some variable 2 value" }, + }; + + // Act. + var variables = new Variables(hc, copy); + + // Assert. + Assert.Equal(string.Empty, variables.Get("variable1")); + Assert.Equal("some variable 2 value", variables.Get("variable2")); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Constructor_SetsNullAsEmpty() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + var copy = new Dictionary<string, VariableValue> + { + { "variable1", new VariableValue(null, false) }, + }; + + // Act. + var variables = new Variables(hc, copy); + + // Assert. + Assert.Equal(string.Empty, variables.Get("variable1")); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Constructor_SetsOrdinalIgnoreCaseComparer() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + CultureInfo currentCulture = CultureInfo.CurrentCulture; + CultureInfo currentUICulture = CultureInfo.CurrentUICulture; + try + { + CultureInfo.CurrentCulture = new CultureInfo("tr-TR"); + CultureInfo.CurrentUICulture = new CultureInfo("tr-TR"); + var copy = new Dictionary<string, VariableValue> + { + { "i", "foo" }, + { "I", "foo" }, + }; + + // Act. + var variables = new Variables(hc, copy); + + // Assert. + Assert.Equal(1, variables.AllVariables.Count()); + } + finally + { + // Cleanup. + CultureInfo.CurrentCulture = currentCulture; + CultureInfo.CurrentUICulture = currentUICulture; + } + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Constructor_SkipVariableWithEmptyName() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + var copy = new Dictionary<string, VariableValue> + { + { "", "" }, + { " ", "" }, + { "MyPublicVariable", "My public value" }, + }; + + var variables = new Variables(hc, copy); + + // Assert. 
+ Assert.Equal(1, variables.AllVariables.Count()); + Assert.Equal("MyPublicVariable", variables.AllVariables.Single().Name); + Assert.Equal("My public value", variables.AllVariables.Single().Value); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void Get_ReturnsNullIfNotFound() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + var variables = new Variables(hc, new Dictionary<string, VariableValue>()); + + // Act. + string actual = variables.Get("no such"); + + // Assert. + Assert.Equal(null, actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void GetBoolean_DoesNotThrowWhenNull() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + var variables = new Variables(hc, new Dictionary<string, VariableValue>()); + + // Act. + bool? actual = variables.GetBoolean("no such"); + + // Assert. + Assert.Null(actual); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void GetEnum_DoesNotThrowWhenNull() + { + using (TestHostContext hc = new TestHostContext(this)) + { + // Arrange. + var variables = new Variables(hc, new Dictionary<string, VariableValue>()); + + // Act. + System.IO.FileShare? actual = variables.GetEnum<System.IO.FileShare>("no such"); + + // Assert. + Assert.Null(actual); + } + } + } +} diff --git a/src/Test/L0/Worker/WorkerL0.cs b/src/Test/L0/Worker/WorkerL0.cs new file mode 100644 index 00000000000..f0d75ac5c69 --- /dev/null +++ b/src/Test/L0/Worker/WorkerL0.cs @@ -0,0 +1,308 @@ +using GitHub.DistributedTask.WebApi; +using GitHub.DistributedTask.ObjectTemplating.Tokens; +using GitHub.Runner.Worker; +using Moq; +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Xunit; +using GitHub.Services.WebApi; +using Pipelines = GitHub.DistributedTask.Pipelines; + +namespace GitHub.Runner.Common.Tests.Worker +{ + public sealed class WorkerL0 + { + private Mock<IProcessChannel> _processChannel; + private Mock<IJobRunner> _jobRunner; + private Mock _proxy; + private Mock _cert; + + public WorkerL0() + { + _processChannel = new Mock<IProcessChannel>(); + _jobRunner = new Mock<IJobRunner>(); + _proxy = new Mock(); + _cert = new Mock(); + } + + private Pipelines.AgentJobRequestMessage CreateJobRequestMessage(string jobName) + { + TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference() { PlanId = Guid.NewGuid() }; + TimelineReference timeline = null; + Dictionary<string, VariableValue> variables = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase); + variables[Constants.Variables.System.Culture] = "en-US"; + Pipelines.JobResources resources = new Pipelines.JobResources(); + var serviceEndpoint = new ServiceEndpoint(); + serviceEndpoint.Authorization = new EndpointAuthorization(); + serviceEndpoint.Authorization.Parameters.Add("nullValue", null); + resources.Endpoints.Add(serviceEndpoint); + + List<Pipelines.TaskStep> tasks = new List<Pipelines.TaskStep>(); + tasks.Add(new Pipelines.TaskStep() + { + Id = Guid.NewGuid(), + Reference = new Pipelines.TaskStepDefinitionReference() + { + Id = Guid.NewGuid(), + Name = "TestTask", + Version = "1.0.0" + } + }); + Guid JobId = Guid.NewGuid(); + var sidecarContainers = new MappingToken(null, null, null) + { + { + new StringToken(null, null, null, "nginx"), + new MappingToken(null, null, null) + { + { + new StringToken(null, null, null, "image"), + new StringToken(null, null, null, "nginx") + }, + } + }, + }; + var context = new Pipelines.ContextData.DictionaryContextData + { + { + "github", + new Pipelines.ContextData.DictionaryContextData() + }, + }; + var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, 
JobId, jobName, jobName, new StringToken(null, null, null, "ubuntu"), sidecarContainers, null, variables, new List(), resources, context, null, tasks, null); + return jobRequest; + } + + private JobCancelMessage CreateJobCancelMessage(Guid jobId) + { + return new JobCancelMessage(jobId, TimeSpan.FromSeconds(0)); + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void DispatchRunNewJob() + { + //Arrange + using (var hc = new TestHostContext(this)) + using (var tokenSource = new CancellationTokenSource()) + { + var worker = new GitHub.Runner.Worker.Worker(); + hc.EnqueueInstance(_processChannel.Object); + hc.EnqueueInstance(_jobRunner.Object); + hc.SetSingleton(_proxy.Object); + hc.SetSingleton(_cert.Object); + worker.Initialize(hc); + var jobMessage = CreateJobRequestMessage("job1"); + var arWorkerMessages = new WorkerMessage[] + { + new WorkerMessage + { + Body = JsonUtility.ToString(jobMessage), + MessageType = MessageType.NewJobRequest + } + }; + var workerMessages = new Queue(arWorkerMessages); + + _processChannel + .Setup(x => x.ReceiveAsync(It.IsAny())) + .Returns(async () => + { + // Return the job message. + if (workerMessages.Count > 0) + { + return workerMessages.Dequeue(); + } + + // Wait for the text to run + await Task.Delay(-1, tokenSource.Token); + return default(WorkerMessage); + }); + _jobRunner.Setup(x => x.RunAsync(It.IsAny(), It.IsAny())) + .Returns(Task.FromResult(TaskResult.Succeeded)); + + //Act + await worker.RunAsync(pipeIn: "1", pipeOut: "2"); + + //Assert + _processChannel.Verify(x => x.StartClient("1", "2"), Times.Once()); + _jobRunner.Verify(x => x.RunAsync( + It.Is(y => IsMessageIdentical(y, jobMessage)), It.IsAny())); + tokenSource.Cancel(); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public async void DispatchCancellation() + { + //Arrange + using (var hc = new TestHostContext(this)) + { + var worker = new GitHub.Runner.Worker.Worker(); + hc.EnqueueInstance(_processChannel.Object); + hc.EnqueueInstance(_jobRunner.Object); + hc.SetSingleton(_proxy.Object); + hc.SetSingleton(_cert.Object); + worker.Initialize(hc); + var jobMessage = CreateJobRequestMessage("job1"); + var cancelMessage = CreateJobCancelMessage(jobMessage.JobId); + var arWorkerMessages = new WorkerMessage[] + { + new WorkerMessage + { + Body = JsonUtility.ToString(jobMessage), + MessageType = MessageType.NewJobRequest + }, + new WorkerMessage + { + Body = JsonUtility.ToString(cancelMessage), + MessageType = MessageType.CancelRequest + } + + }; + var workerMessages = new Queue(arWorkerMessages); + + _processChannel.Setup(x => x.ReceiveAsync(It.IsAny())) + .Returns(() => Task.FromResult(workerMessages.Dequeue())); + _jobRunner.Setup(x => x.RunAsync(It.IsAny(), It.IsAny())) + .Returns( + async (Pipelines.AgentJobRequestMessage jm, CancellationToken ct) => + { + await Task.Delay(-1, ct); + return TaskResult.Canceled; + }); + + //Act + await Assert.ThrowsAsync( + async () => await worker.RunAsync("1", "2")); + + //Assert + _processChannel.Verify(x => x.StartClient("1", "2"), Times.Once()); + _jobRunner.Verify(x => x.RunAsync( + It.Is(y => IsMessageIdentical(y, jobMessage)), It.IsAny())); + } + } + + [Fact] + [Trait("Level", "L0")] + [Trait("Category", "Worker")] + public void VerifyJobRequestMessagePiiDataIsScrubbed() + { + // Arrange + Pipelines.AgentJobRequestMessage message = CreateJobRequestMessage("jobwithpiidata"); + + // Populate PII variables + foreach (string piiVariable in Variables.PiiVariables) + { + 
message.Variables.Add(piiVariable, "MyPiiVariable"); + } + + foreach (string piiVariableSuffix in Variables.PiiArtifactVariableSuffixes) + { + message.Variables.Add($"{Variables.PiiArtifactVariablePrefix}.MyArtifact.{piiVariableSuffix}", "MyPiiVariable"); + } + + // Populate the repository PII data + Pipelines.RepositoryResource repository = new Pipelines.RepositoryResource(); + + repository.Properties.Set( + Pipelines.RepositoryPropertyNames.VersionInfo, + new Pipelines.VersionInfo() + { + Author = "MyAuthor", + Message = "MyMessage" + }); + + message.Resources.Repositories.Add(repository); + + // Act + Pipelines.AgentJobRequestMessage scrubbedMessage = WorkerUtilities.ScrubPiiData(message); + + // Assert + foreach (string piiVariable in Variables.PiiVariables) + { + scrubbedMessage.Variables.TryGetValue(piiVariable, out VariableValue value); + + Assert.Equal("[PII]", value.Value); + } + + foreach (string piiVariableSuffix in Variables.PiiArtifactVariableSuffixes) + { + scrubbedMessage.Variables.TryGetValue($"{Variables.PiiArtifactVariablePrefix}.MyArtifact.{piiVariableSuffix}", out VariableValue value); + + Assert.Equal("[PII]", value.Value); + } + + Pipelines.RepositoryResource scrubbedRepo = scrubbedMessage.Resources.Repositories[0]; + Pipelines.VersionInfo scrubbedInfo = scrubbedRepo.Properties.Get(Pipelines.RepositoryPropertyNames.VersionInfo); + + Assert.Equal("[PII]", scrubbedInfo.Author); + } + + private bool IsMessageIdentical(Pipelines.AgentJobRequestMessage source, Pipelines.AgentJobRequestMessage target) + { + if (source == null && target == null) + { + return true; + } + if (source != null && target == null) + { + return false; + } + if (source == null && target != null) + { + return false; + } + if (JsonUtility.ToString(source.JobContainer) != JsonUtility.ToString(target.JobContainer)) + { + return false; + } + if (source.JobDisplayName != target.JobDisplayName) + { + return false; + } + if (source.JobId != target.JobId) + { + return false; + } + if (source.JobName != target.JobName) + { + return false; + } + if (source.MaskHints.Count != target.MaskHints.Count) + { + return false; + } + if (source.MessageType != target.MessageType) + { + return false; + } + if (source.Plan.PlanId != target.Plan.PlanId) + { + return false; + } + if (source.RequestId != target.RequestId) + { + return false; + } + if (source.Resources.Endpoints.Count != target.Resources.Endpoints.Count) + { + return false; + } + if (source.Steps.Count != target.Steps.Count) + { + return false; + } + if (source.Variables.Count != target.Variables.Count) + { + return false; + } + + return true; + } + } +} diff --git a/src/Test/Properties.cs b/src/Test/Properties.cs new file mode 100644 index 00000000000..a4bcec543f2 --- /dev/null +++ b/src/Test/Properties.cs @@ -0,0 +1,3 @@ +using Xunit; + +[assembly: CollectionBehavior(DisableTestParallelization = true)] diff --git a/src/Test/Test.csproj b/src/Test/Test.csproj new file mode 100644 index 00000000000..e915b4e8644 --- /dev/null +++ b/src/Test/Test.csproj @@ -0,0 +1,70 @@ + + + netcoreapp2.2 + win-x64;win-x86;linux-x64;linux-arm;rhel.6-x64;osx-x64 + true + portable-net45+win8 + NU1701;NU1603;NU1603;xUnit2013; + + + + + + + + + + + + + + + + + + + + + + portable + + + + OS_WINDOWS;X64;TRACE + + + OS_WINDOWS;X86;TRACE + + + OS_WINDOWS;X64;DEBUG;TRACE + + + OS_WINDOWS;X86;DEBUG;TRACE + + + + OS_OSX;X64;TRACE + + + OS_OSX;DEBUG;X64;TRACE + + + + OS_LINUX;X64;TRACE + + + OS_LINUX;OS_RHEL6;X64;TRACE + + + OS_LINUX;ARM;TRACE + + + OS_LINUX;X64;DEBUG;TRACE + + + 
OS_LINUX;OS_RHEL6;X64;DEBUG;TRACE + + + OS_LINUX;ARM;DEBUG;TRACE + + diff --git a/src/Test/TestData/dockerfileaction.yml b/src/Test/TestData/dockerfileaction.yml new file mode 100644 index 00000000000..1052a0d16b3 --- /dev/null +++ b/src/Test/TestData/dockerfileaction.yml @@ -0,0 +1,25 @@ +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'Test Corporation' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'docker' + image: 'Dockerfile' + args: + - 'bzz' + entrypoint: 'main.sh' + env: + Token: foo + Url: bar \ No newline at end of file diff --git a/src/Test/TestData/dockerfileaction_arg_env_expression.yml b/src/Test/TestData/dockerfileaction_arg_env_expression.yml new file mode 100644 index 00000000000..64be7490174 --- /dev/null +++ b/src/Test/TestData/dockerfileaction_arg_env_expression.yml @@ -0,0 +1,25 @@ +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'Test Corporation' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'docker' + image: 'Dockerfile' + args: + - '${{ inputs.greeting }}' + entrypoint: 'main.sh' + env: + Token: foo + Url: '${{ inputs.entryPoint }}' \ No newline at end of file diff --git a/src/Test/TestData/dockerfileaction_cleanup.yml b/src/Test/TestData/dockerfileaction_cleanup.yml new file mode 100644 index 00000000000..e514d25d381 --- /dev/null +++ b/src/Test/TestData/dockerfileaction_cleanup.yml @@ -0,0 +1,27 @@ +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'Test Corporation' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' 
+ required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'docker' + image: 'Dockerfile' + args: + - 'bzz' + entrypoint: 'main.sh' + env: + Token: foo + Url: bar + post-entrypoint: 'cleanup.sh' + post-if: 'failure()' \ No newline at end of file diff --git a/src/Test/TestData/dockerfileaction_noargs_noenv_noentrypoint.yml b/src/Test/TestData/dockerfileaction_noargs_noenv_noentrypoint.yml new file mode 100644 index 00000000000..e23103cbde1 --- /dev/null +++ b/src/Test/TestData/dockerfileaction_noargs_noenv_noentrypoint.yml @@ -0,0 +1,19 @@ +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'Test Corporation' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'docker' + image: 'Dockerfile' \ No newline at end of file diff --git a/src/Test/TestData/dockerfilerelativeaction.yml b/src/Test/TestData/dockerfilerelativeaction.yml new file mode 100644 index 00000000000..dbcba292741 --- /dev/null +++ b/src/Test/TestData/dockerfilerelativeaction.yml @@ -0,0 +1,25 @@ +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'Test Corporation' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'docker' + image: 'images/Dockerfile' + args: + - '${{ inputs.greeting }}' + entrypoint: 'main.sh' + env: + Token: foo + Url: bar \ No newline at end of file diff --git a/src/Test/TestData/dockerhubaction.yml b/src/Test/TestData/dockerhubaction.yml new file mode 100644 index 00000000000..47e55bbc96a --- /dev/null +++ b/src/Test/TestData/dockerhubaction.yml @@ -0,0 +1,25 @@ +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'Test Corporation' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' 
+ required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'docker' + image: 'docker://ubuntu:18.04' + args: + - 'bzz' + entrypoint: 'main.sh' + env: + Token: foo + Url: bar \ No newline at end of file diff --git a/src/Test/TestData/nodeaction.yml b/src/Test/TestData/nodeaction.yml new file mode 100644 index 00000000000..616889063fa --- /dev/null +++ b/src/Test/TestData/nodeaction.yml @@ -0,0 +1,20 @@ +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'Test Corporation' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + deprecationMessage: 'This property has been deprecated' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'node12' + main: 'main.js' \ No newline at end of file diff --git a/src/Test/TestData/nodeaction_cleanup.yml b/src/Test/TestData/nodeaction_cleanup.yml new file mode 100644 index 00000000000..d6d522493fa --- /dev/null +++ b/src/Test/TestData/nodeaction_cleanup.yml @@ -0,0 +1,22 @@ +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'Test Corporation' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + deprecationMessage: 'This property has been deprecated' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'node12' + main: 'main.js' + post: 'cleanup.js' + post-if: 'cancelled()' \ No newline at end of file diff --git a/src/Test/TestData/noderelativeaction.yml b/src/Test/TestData/noderelativeaction.yml new file mode 100644 index 00000000000..de9b2d4cdf1 --- /dev/null +++ b/src/Test/TestData/noderelativeaction.yml @@ -0,0 +1,19 @@ +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'Test Corporation' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' 
+ required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + using: 'node12' + main: 'scripts/main.js' \ No newline at end of file diff --git a/src/Test/TestData/pluginaction.yml b/src/Test/TestData/pluginaction.yml new file mode 100644 index 00000000000..64d8afea126 --- /dev/null +++ b/src/Test/TestData/pluginaction.yml @@ -0,0 +1,18 @@ +name: 'Hello World' +description: 'Greet the world and record the time' +author: 'Test Corporation' +inputs: + greeting: # id of input + description: 'The greeting we choose - will print ""{greeting}, World!"" on stdout' + required: true + default: 'Hello' + entryPoint: # id of input + description: 'optional docker entrypoint overwrite.' + required: false +outputs: + time: # id of output + description: 'The time we did the greeting' +icon: 'hello.svg' # vector art to display in the GitHub Marketplace +color: 'green' # optional, decorates the entry in the GitHub Marketplace +runs: + plugin: 'someplugin' \ No newline at end of file diff --git a/src/Trim-Sdk.ps1 b/src/Trim-Sdk.ps1 new file mode 100644 index 00000000000..5b128cb5d98 --- /dev/null +++ b/src/Trim-Sdk.ps1 @@ -0,0 +1,99 @@ +$ErrorActionPreference = "Stop" + +# Push-Location -Path .\Sdk +try +{ + # Generate Namespaces.cs + Write-Host 'Generating Namespaces.cs' + Remove-Item -Path Sdk\Namespaces.cs -ErrorAction Ignore + $namespaces = New-Object System.Collections.Generic.HashSet[string] + $output = findstr /snir /c:"^namespace " Sdk\*.cs + foreach ($outputLine in ($output.Trim().Replace("`r", "").Split("`n"))) + { + $namespace = $outputLine.Trim().Split(':')[-1].Split(' ')[-1] + $namespaces.Add($namespace) | Out-Null + } + + $namespaces = New-Object System.Collections.Generic.List[string]($namespaces) + $namespaces.Sort() + $content = New-Object System.Text.StringBuilder + foreach ($namespace in $namespaces) + { + $content.AppendLine("namespace $namespace") | Out-Null + $content.AppendLine("{") | Out-Null + $content.AppendLine("}") | Out-Null + $content.AppendLine("") | Out-Null + } + [System.IO.File]::WriteAllText("$pwd\Sdk\Namespaces.cs", $content.ToString(), (New-Object System.Text.UTF8Encoding($false))) + + # Gather whitelist of files not to delete + Write-Host 'Gathering whitelist of files not to delete' + $whitelist = New-Object System.Collections.Generic.HashSet[string] + $whitelist.Add(((Resolve-Path -Path Sdk\Namespaces.cs).Path)) | Out-Null + foreach ($file in (Get-ChildItem -Path Sdk\DTExpressions -Recurse -Filter *.cs)) + { + $whitelist.Add($file.FullName) | Out-Null + } + foreach ($file in (Get-ChildItem -Path Sdk\DTLogging -Recurse -Filter *.cs)) + { + $whitelist.Add($file.FullName) | Out-Null + } + foreach ($file in (Get-ChildItem -Path Sdk\DTObjectTemplating -Recurse -Filter *.cs)) + { + $whitelist.Add($file.FullName) | Out-Null + } + foreach ($file in (Get-ChildItem -Path Sdk\DTPipelines\Pipelines\ContextData -Recurse -Filter *.cs)) + { + $whitelist.Add($file.FullName) | Out-Null + } + foreach ($file in (Get-ChildItem -Path Sdk\DTPipelines\Pipelines\ObjectTemplating -Recurse -Filter *.cs)) + { + $whitelist.Add($file.FullName) | Out-Null + } + + # Gather candidate files to delete + Write-Host 'Gathering candidate files to delete' + $candidatePaths = New-Object System.Collections.Generic.List[string] + $deletedPaths = New-Object System.Collections.Generic.List[string] + foreach ($candidateFile in 
(Get-ChildItem -Path Sdk -Recurse -Filter *.cs)) + { + if (!$whitelist.Contains($candidateFile.FullName) -and (($candidateFile.FullName.IndexOf('\obj\')) -le 0)) + { + $candidatePaths.Add($candidateFile.FullName) + } + } + + while ($true) + { + $found = $false + for ($i = 0; $i -lt $candidatePaths.Count; ) + { + $candidatePath = $candidatePaths[$i] + Write-Host "Checking $candidatePath" + Remove-Item -Path $candidatePath + .\dev.cmd build + if ($LASTEXITCODE -eq 0) + { + $deletedPaths.Add($candidatePath) + $candidatePaths.RemoveAt($i) + Write-Host "Successfully deleted $candidatePath" + $found = $true + } + else + { + Write-Host "Undeleting $candidatePath" + git checkout -- $candidatePath + $i++ + } + } + + if (!$found) + { + break; + } + } +} +finally +{ + # Pop-Location +} \ No newline at end of file diff --git a/src/dev.cmd b/src/dev.cmd new file mode 100644 index 00000000000..0f8a1b57f65 --- /dev/null +++ b/src/dev.cmd @@ -0,0 +1,33 @@ +@setlocal +@echo off + rem Check if SH_PATH is defined. +if defined SH_PATH ( + goto run +) + + rem Attempt to resolve sh.exe from the PATH. +where sh.exe 1>"%TEMP%\where_sh" 2>nul +set /p SH_PATH= < "%TEMP%\where_sh" +del "%TEMP%\where_sh" +if defined SH_PATH ( + goto run +) + + rem Check well-known locations. +set SH_PATH=C:\Program Files\Git\bin\sh.exe +if exist "%SH_PATH%" ( + goto run +) + + rem Check well-known locations. +set SH_PATH=%LOCALAPPDATA%\Programs\Git\bin\sh.exe +if exist "%SH_PATH%" ( + goto run +) + +echo Unable to resolve location of sh.exe. 1>&2 +exit /b 1 + +:run +echo on +"%SH_PATH%" "%~dp0dev.sh" %* diff --git a/src/dev.sh b/src/dev.sh new file mode 100755 index 00000000000..6868a2ffadd --- /dev/null +++ b/src/dev.sh @@ -0,0 +1,250 @@ +#!/bin/bash + +############################################################################### +# +# ./dev.sh build/layout/test/package [Debug/Release] +# +############################################################################### +set -e + +DEV_CMD=$1 +DEV_CONFIG=$2 + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +LAYOUT_DIR="$SCRIPT_DIR/../_layout" +DOWNLOAD_DIR="$SCRIPT_DIR/../_downloads/netcore2x" +PACKAGE_DIR="$SCRIPT_DIR/../_package" +DOTNETSDK_ROOT="$SCRIPT_DIR/../_dotnetsdk" +DOTNETSDK_VERSION="2.2.300" +DOTNETSDK_INSTALLDIR="$DOTNETSDK_ROOT/$DOTNETSDK_VERSION" +RUNNER_VERSION=$(cat runnerversion) + +pushd "$SCRIPT_DIR" + +BUILD_CONFIG="Debug" +if [[ "$DEV_CONFIG" == "Release" ]]; then + BUILD_CONFIG="Release" +fi + +CURRENT_PLATFORM="windows" +if [[ ($(uname) == "Linux") || ($(uname) == "Darwin") ]]; then + CURRENT_PLATFORM=$(uname | awk '{print tolower($0)}') +fi + +if [[ "$CURRENT_PLATFORM" == 'windows' ]]; then + RUNTIME_ID='win-x64' + if [[ "$PROCESSOR_ARCHITECTURE" == 'x86' ]]; then + RUNTIME_ID='win-x86' + fi +elif [[ "$CURRENT_PLATFORM" == 'linux' ]]; then + RUNTIME_ID="linux-x64" + if command -v uname > /dev/null; then + CPU_NAME=$(uname -m) + case $CPU_NAME in + armv7l) RUNTIME_ID="linux-arm";; + aarch64) RUNTIME_ID="linux-arm";; + esac + fi + + if [ -e /etc/redhat-release ]; then + redhatRelease=$(</etc/redhat-release) + if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]; then + RUNTIME_ID='rhel.6-x64' + fi + fi +elif [[ "$CURRENT_PLATFORM" == 'darwin' ]]; then + RUNTIME_ID='osx-x64' +fi + +if [[ "$CURRENT_PLATFORM" == 'windows' ]]; then + if [[ ("$RUNTIME_ID" != 'win-x64') && ("$RUNTIME_ID" != 'win-x86') ]]; then + echo "Failed: Can't build $RUNTIME_ID package $CURRENT_PLATFORM" >&2 + exit 1 + fi +elif [[ "$CURRENT_PLATFORM" == 'linux' ]]; then + if [[ ("$RUNTIME_ID" != 'linux-x64') && ("$RUNTIME_ID" != 'linux-arm') && ("$RUNTIME_ID" != 'rhel.6-x64') ]]; then + echo "Failed: Can't build $RUNTIME_ID package $CURRENT_PLATFORM" >&2 + exit 1 + fi +elif [[ "$CURRENT_PLATFORM" == 'darwin' ]]; then + if [[ ("$RUNTIME_ID" != 'osx-x64') ]]; then + echo "Failed: Can't build $RUNTIME_ID package $CURRENT_PLATFORM" >&2 + exit 1 + fi +fi + +function
failed() +{ + local error=${1:-Undefined error} + echo "Failed: $error" >&2 + popd + exit 1 +} + +function warn() +{ + local error=${1:-Undefined error} + echo "WARNING - FAILED: $error" >&2 +} + +function checkRC() { + local rc=$? + if [ $rc -ne 0 ]; then + failed "${1} Failed with return code $rc" + fi +} + +function heading() +{ + echo + echo + echo "-----------------------------------------" + echo " ${1}" + echo "-----------------------------------------" +} + +function build () +{ + heading "Building ..." + dotnet msbuild -t:Build -p:PackageRuntime="${RUNTIME_ID}" -p:BUILDCONFIG="${BUILD_CONFIG}" -p:AgentVersion="${RUNNER_VERSION}" || failed build +} + +function layout () +{ + heading "Create layout ..." + dotnet msbuild -t:layout -p:PackageRuntime="${RUNTIME_ID}" -p:BUILDCONFIG="${BUILD_CONFIG}" -p:AgentVersion="${RUNNER_VERSION}" || failed build + + #change execution flag to allow running with sudo + if [[ ("$CURRENT_PLATFORM" == "linux") || ("$CURRENT_PLATFORM" == "darwin") ]]; then + chmod +x "${LAYOUT_DIR}/bin/Runner.Listener" + chmod +x "${LAYOUT_DIR}/bin/Runner.Worker" + chmod +x "${LAYOUT_DIR}/bin/Runner.PluginHost" + chmod +x "${LAYOUT_DIR}/bin/installdependencies.sh" + fi + + heading "Setup externals folder for $RUNTIME_ID runner's layout" + bash ./Misc/externals.sh $RUNTIME_ID || checkRC externals.sh +} + +function runtest () +{ + heading "Testing ..." + + if [[ ("$CURRENT_PLATFORM" == "linux") || ("$CURRENT_PLATFORM" == "darwin") ]]; then + ulimit -n 1024 + fi + + export GITHUB_RUNNER_SRC_DIR=${SCRIPT_DIR} + + dotnet msbuild -t:test -p:PackageRuntime="${RUNTIME_ID}" -p:BUILDCONFIG="${BUILD_CONFIG}" -p:AgentVersion="${RUNNER_VERSION}" || failed "failed tests" +} + +function package () +{ + if [ ! -d "${LAYOUT_DIR}/bin" ]; then + echo "You must build first. Expecting to find ${LAYOUT_DIR}/bin" + fi + + runner_ver=$("${LAYOUT_DIR}/bin/Runner.Listener" --version) || failed "version" + runner_pkg_name="actions-runner-${RUNTIME_ID}-${runner_ver}" + + heading "Packaging ${runner_pkg_name}" + + rm -Rf "${LAYOUT_DIR:?}/_diag" + find "${LAYOUT_DIR}/bin" -type f -name '*.pdb' -delete + + mkdir -p "$PACKAGE_DIR" + rm -Rf "${PACKAGE_DIR:?}"/* + + pushd "$PACKAGE_DIR" > /dev/null + + if [[ ("$CURRENT_PLATFORM" == "linux") || ("$CURRENT_PLATFORM" == "darwin") ]]; then + tar_name="${runner_pkg_name}.tar.gz" + echo "Creating $tar_name in ${LAYOUT_DIR}" + tar -czf "${tar_name}" -C "${LAYOUT_DIR}" . + elif [[ ("$CURRENT_PLATFORM" == "windows") ]]; then + zip_name="${runner_pkg_name}.zip" + echo "Convert ${LAYOUT_DIR} to Windows style path" + window_path=${LAYOUT_DIR:1} + window_path=${window_path:0:1}:${window_path:1} + echo "Creating $zip_name in ${window_path}" + powershell -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "Add-Type -Assembly \"System.IO.Compression.FileSystem\"; [System.IO.Compression.ZipFile]::CreateFromDirectory(\"${window_path}\", \"${zip_name}\")" + fi + + popd > /dev/null +} + +if [[ (! -d "${DOTNETSDK_INSTALLDIR}") || (! -e "${DOTNETSDK_INSTALLDIR}/.${DOTNETSDK_VERSION}") || (! 
-e "${DOTNETSDK_INSTALLDIR}/dotnet") ]]; then + + # Download dotnet SDK to ../_dotnetsdk directory + heading "Ensure Dotnet SDK" + + # _dotnetsdk + # \1.0.x + # \dotnet + # \.1.0.x + echo "Download dotnetsdk into ${DOTNETSDK_INSTALLDIR}" + rm -Rf "${DOTNETSDK_DIR}" + + # run dotnet-install.ps1 on windows, dotnet-install.sh on linux + if [[ ("$CURRENT_PLATFORM" == "windows") ]]; then + echo "Convert ${DOTNETSDK_INSTALLDIR} to Windows style path" + sdkinstallwindow_path=${DOTNETSDK_INSTALLDIR:1} + sdkinstallwindow_path=${sdkinstallwindow_path:0:1}:${sdkinstallwindow_path:1} + powershell -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "& \"./Misc/dotnet-install.ps1\" -Version ${DOTNETSDK_VERSION} -InstallDir \"${sdkinstallwindow_path}\" -NoPath; exit \$LastExitCode;" || checkRC dotnet-install.ps1 + else + bash ./Misc/dotnet-install.sh --version ${DOTNETSDK_VERSION} --install-dir "${DOTNETSDK_INSTALLDIR}" --no-path || checkRC dotnet-install.sh + fi + + echo "${DOTNETSDK_VERSION}" > "${DOTNETSDK_INSTALLDIR}/.${DOTNETSDK_VERSION}" +fi + +echo "Prepend ${DOTNETSDK_INSTALLDIR} to %PATH%" +export PATH=${DOTNETSDK_INSTALLDIR}:$PATH + +heading "Github Dreamlifter Runner" + +heading "Dotnet SDK Version" +dotnet --version + +heading "Pre-cache external resources for $RUNTIME_ID package ..." +bash ./Misc/externals.sh $RUNTIME_ID "Pre-Cache" || checkRC "externals.sh Pre-Cache" + +if [[ "$CURRENT_PLATFORM" == 'windows' ]]; then + vswhere=$(find "$DOWNLOAD_DIR" -name vswhere.exe | head -1) + vs_location=$("$vswhere" -prerelease -latest -property installationPath) + msbuild_location="$vs_location""\MSBuild\15.0\Bin\msbuild.exe" + + if [[ ! -e "${msbuild_location}" ]]; then + msbuild_location="$vs_location""\MSBuild\Current\Bin\msbuild.exe" + + if [[ ! -e "${msbuild_location}" ]]; then + failed "Can not find msbuild location, failing build" + fi + fi + + export DesktopMSBuild="$msbuild_location" +fi + +case $DEV_CMD in + "build") build;; + "b") build;; + "test") runtest;; + "t") runtest;; + "layout") layout;; + "l") layout;; + "package") package;; + "p") package;; + *) echo "Invalid cmd. Use build(b), test(t), layout(l) or package(p)";; +esac + +popd +echo +echo Done. 
+echo diff --git a/src/dir.proj b/src/dir.proj new file mode 100644 index 00000000000..6b0f4a4b230 --- /dev/null +++ b/src/dir.proj @@ -0,0 +1,87 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/global.json b/src/global.json new file mode 100644 index 00000000000..93238784474 --- /dev/null +++ b/src/global.json @@ -0,0 +1,5 @@ +{ + "sdk": { + "version": "2.2.300" + } +} \ No newline at end of file diff --git a/src/runnerversion b/src/runnerversion new file mode 100644 index 00000000000..9541ec81700 --- /dev/null +++ b/src/runnerversion @@ -0,0 +1 @@ +2.159.0 \ No newline at end of file diff --git a/windows.template.yml b/windows.template.yml new file mode 100644 index 00000000000..6e003cc6776 --- /dev/null +++ b/windows.template.yml @@ -0,0 +1,29 @@ +steps: + +# Build agent layout +- script: dev.cmd layout Release + workingDirectory: src + displayName: Build & Layout Release + +# Run test +- script: dev.cmd test + workingDirectory: src + displayName: Test + +# # Publish test results +# - task: PublishTestResults@2 +# displayName: Publish Test Results **/*.trx +# condition: always() +# inputs: +# testRunner: VSTest +# testResultsFiles: '**/*.trx' +# testRunTitle: 'Agent Tests' + +# # Upload test log +# - task: PublishBuildArtifacts@1 +# displayName: Publish Test logs +# condition: always() +# inputs: +# pathToPublish: src/Test/TestLogs +# artifactName: $(System.JobId) +# artifactType: container
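
For reference, a minimal local-usage sketch of the dev scripts added above (src/dev.cmd and src/dev.sh). The subcommands and their single-letter aliases (build/b, test/t, layout/l, package/p) come from the case statement at the end of dev.sh, and the configuration argument defaults to Debug; the working directory and the comments below are illustrative assumptions, while the output locations (../_layout, ../_package) are the defaults declared at the top of dev.sh.

    cd src                      # assumes a clone of the repository; run from the src directory
    ./dev.sh layout Release     # build and lay out a Release runner into ../_layout (alias: l)
    ./dev.sh test               # run the L0 tests (alias: t)
    ./dev.sh package Release    # create actions-runner-<RUNTIME_ID>-<version>.tar.gz/.zip in ../_package (alias: p)
    # On Windows, dev.cmd resolves sh.exe (Git for Windows) and forwards the same arguments to dev.sh:
    # dev.cmd layout Release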